hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
8744bb3e64b04286013f99155a9d8703df12a0a6
8,798
py
Python
core/api/tests/test_unit_views.py
divinedeveloper/mini-quora
4465937c2a3cd35387f75f04b9e705a8504d9dab
[ "Apache-2.0" ]
null
null
null
core/api/tests/test_unit_views.py
divinedeveloper/mini-quora
4465937c2a3cd35387f75f04b9e705a8504d9dab
[ "Apache-2.0" ]
null
null
null
core/api/tests/test_unit_views.py
divinedeveloper/mini-quora
4465937c2a3cd35387f75f04b9e705a8504d9dab
[ "Apache-2.0" ]
null
null
null
from rest_framework.test import APIRequestFactory, APIClient from rest_framework import status from django.conf import settings from core.api.models import User, Question, Answer, Tenant from core.api.custom_exceptions import CustomApiException from core.api.services import Services from core.api.serializers import UserSerializer, QuestionSerializer, AnswerSerializer, TenantSerializer from core.api.views import search_questions, dashboard, tenants_dashboard from django.core.urlresolvers import reverse from django.db.models import Q import pytest from pytest_mock import mocker from django.core.management import call_command import json # Create your tests here. @pytest.mark.unittest class TestUnitViews: def setup_method(self): """ Get instance of APIRequestFactory To mock request object which will be directly passed to views as a first argument """ self.request_factory = APIRequestFactory() self.search_questions_api_url = reverse("search_questions") self.dashboard_api_url = reverse("dashboard") self.tenants_dashboard_api_url = reverse("tenants_dashboard") def test_unit_search_questions_blank_title(self, db, django_db_setup, mocker): """ This method will tests search questions in views assert status is 200 and data is returned as per request """ params = {'title': '', 'offset': '0', 'limit' : '10'} request = self.request_factory.get(self.search_questions_api_url, params, HTTP_API_KEY = "6e762d97-2d46-48cc-99b6-58cc0942d514") mocker.patch.object(Services, 'service_search_questions') mocked_serializer = mocker.patch('core.api.serializers.QuestionSerializer') questions = Question.objects.filter(private = False)[params['offset']:params['limit']] count = Question.objects.filter(private = False).count() Services.service_search_questions.return_value = questions, count response = search_questions(request) Services.service_search_questions.assert_called_with(params['title'], params['offset'], params['limit']) mocked_serialize_response = mocked_serializer(questions) 
mocked_serializer.assert_called_with(questions) question_serializer = QuestionSerializer(questions, many=True) json_response = json.loads(response.content) assert response.status_code == status.HTTP_200_OK assert json_response['count'] == count assert json_response['questions'] != None assert json_response['questions'] == question_serializer.data def test_unit_search_questions_with_title(self, db, django_db_setup, mocker): """ This method will tests search questions in views assert status is 200 and questions are returned """ params = {'title': 'Sapiente', 'offset': '0', 'limit' : '10'} request = self.request_factory.get(self.search_questions_api_url, params, HTTP_API_KEY = "6e762d97-2d46-48cc-99b6-58cc0942d514") mocker.patch.object(Services, 'service_search_questions') mocked_serializer = mocker.patch('core.api.serializers.QuestionSerializer') questions = Question.objects.filter(Q(private = False) & Q(title__icontains = params['title']))[params['offset']:params['limit']] count = Question.objects.filter(Q(private = False) & Q(title__icontains = params['title'])).count() Services.service_search_questions.return_value = questions, count response = search_questions(request) Services.service_search_questions.assert_called_with(params['title'], params['offset'], params['limit']) mocked_serialize_response = mocked_serializer(questions) mocked_serializer.assert_called_with(questions) json_response = json.loads(response.content) question_serializer = QuestionSerializer(questions, many=True) assert response.status_code == status.HTTP_200_OK assert json_response['count'] == count assert json_response['questions'] != None assert json_response['questions'] == question_serializer.data def test_unit_questions_not_found(self, db, django_db_setup, mocker): """ This method will tests search questions in views assert status is 404 and questions not found message is returned """ params = {'title': 'myquestion', 'offset': '0', 'limit' : '10'} request = 
self.request_factory.get(self.search_questions_api_url, params, HTTP_API_KEY = "6e762d97-2d46-48cc-99b6-58cc0942d514") mock_service_search_questions = mocker.patch.object(Services, 'service_search_questions') mock_service_search_questions.side_effect = CustomApiException("Questions not found based on criteria", status.HTTP_404_NOT_FOUND) response = search_questions(request) Services.service_search_questions.assert_called_with(params['title'], params['offset'], params['limit']) json_response = json.loads(response.content) assert response.status_code == status.HTTP_404_NOT_FOUND assert json_response['message'] == "Questions not found based on criteria" def test_unit_dashboard_with_non_zero_counts(self, db, django_db_setup, mocker): """ This method will tests dashboard in views assert status is 200 and counts are returned """ request = self.request_factory.get(self.dashboard_api_url) mocker.patch.object(Services, 'service_dashboard') user_count = User.objects.count() question_count = Question.objects.count() answer_count = Answer.objects.count() Services.service_dashboard.return_value = user_count, question_count, answer_count response = dashboard(request) json_response = json.loads(response.content) assert Services.service_dashboard.called assert response.status_code == status.HTTP_200_OK assert json_response['user_count'] == user_count assert json_response['question_count'] == question_count assert json_response['answer_count'] == answer_count def test_unit_dashboard_with_zero_counts_on_blank_db(self, mocker): """ This method will tests dashboard in views assert status is 200 and all 0 counts are returned """ request = self.request_factory.get(self.dashboard_api_url) mocker.patch.object(Services, 'service_dashboard') user_count = 0 question_count = 0 answer_count = 0 Services.service_dashboard.return_value = user_count, question_count, answer_count response = dashboard(request) json_response = json.loads(response.content) assert Services.service_dashboard.called 
assert response.status_code == status.HTTP_200_OK assert json_response['user_count'] == user_count assert json_response['question_count'] == question_count assert json_response['answer_count'] == answer_count def test_unit_tenants_dashboard(self, db, django_db_setup, mocker): """ This method will tests tenants dashboard in views assert status is 200 and tenants with counts are returned """ params = {'offset': '0', 'limit' : '10'} request = self.request_factory.get(self.tenants_dashboard_api_url, params) mocker.patch.object(Services, 'service_tenants_dashboard') mocked_serializer = mocker.patch('core.api.serializers.TenantSerializer') tenants = Tenant.objects.all()[params['offset']:params['limit']] tenants_count = Tenant.objects.count() Services.service_tenants_dashboard.return_value = tenants, tenants_count response = tenants_dashboard(request) Services.service_tenants_dashboard.assert_called_with(params['offset'], params['limit']) mocked_serialize_response = mocked_serializer(tenants) mocked_serializer.assert_called_with(tenants) tenants_serializer = TenantSerializer(tenants, many=True) json_response = json.loads(response.content) assert response.status_code == status.HTTP_200_OK assert json_response['count'] == tenants_count assert json_response['tenants'] != None assert json_response['tenants'] == tenants_serializer.data def test_unit_tenants_dashboard_on_blank_db(self, mocker): """ This method will tests dashboard in views assert status is 200 and counts are returned """ params = {'offset': '0', 'limit' : '10'} request = self.request_factory.get(self.tenants_dashboard_api_url, params) mocker.patch.object(Services, 'service_tenants_dashboard') mocked_serializer = mocker.patch('core.api.serializers.TenantSerializer') tenants = [] tenants_count = 0 Services.service_tenants_dashboard.return_value = tenants, tenants_count response = tenants_dashboard(request) Services.service_tenants_dashboard.assert_called_with(params['offset'], params['limit']) 
mocked_serialize_response = mocked_serializer(tenants) mocked_serializer.assert_called_with(tenants) tenants_serializer = TenantSerializer(tenants, many=True) json_response = json.loads(response.content) assert response.status_code == status.HTTP_200_OK assert json_response['count'] == tenants_count assert json_response['tenants'] == [] def teardown_method(self): self.request_factory = None self.search_questions_api_url = None self.dashboard_api_url = None self.tenants_dashboard_api_url = None
40.173516
132
0.78211
1,126
8,798
5.854352
0.120782
0.056887
0.04915
0.036408
0.781098
0.73665
0.706614
0.699484
0.682342
0.682342
0
0.017275
0.118322
8,798
218
133
40.357798
0.832538
0.094681
0
0.577778
0
0
0.115042
0.048613
0
0
0
0
0.266667
1
0.066667
false
0
0.103704
0
0.177778
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
8758d143964928aa668be5d70ffddfee8938cfe8
1,166
py
Python
setup.py
acheamponge/ninsiiah
c7f7664f1a3831180ceda1f3befc9d5199f7c169
[ "MIT" ]
1
2019-07-04T15:00:23.000Z
2019-07-04T15:00:23.000Z
setup.py
acheamponge/ninsiiah
c7f7664f1a3831180ceda1f3befc9d5199f7c169
[ "MIT" ]
null
null
null
setup.py
acheamponge/ninsiiah
c7f7664f1a3831180ceda1f3befc9d5199f7c169
[ "MIT" ]
1
2020-09-18T21:13:58.000Z
2020-09-18T21:13:58.000Z
import io import os import re from setuptools import find_packages from setuptools import setup def read(filename): filename = os.path.join(os.path.dirname(__file__), filename) text_type = type(u"") with io.open(filename, mode="r", encoding='utf-8') as fd: return re.sub(text_type(r':[a-z]+:`~?(.*?)`'), text_type(r'``\1``'), fd.read()) setup( name="mooc", version="0.0.1", url="https://github.com/acheamponge/mooc", license='MIT', author="Emmanuel Acheampong", author_email="achampion.emma@gmail.com", description="This is a python package for helping the online learning community.", long_description=read("README.rst"), packages=find_packages(exclude=('tests',)), install_requires=[], classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', ], )
27.116279
87
0.623499
141
1,166
5.070922
0.553191
0.212587
0.27972
0.181818
0
0
0
0
0
0
0
0.018478
0.210978
1,166
42
88
27.761905
0.758696
0
0
0
0
0
0.41681
0.020583
0
0
0
0
0
1
0.03125
false
0
0.15625
0
0.21875
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
875bff5c89e6b3148b510280dc4930b871207258
1,295
py
Python
test/test_edit_api.py
Cloudmersive/Cloudmersive.APIClient.Python.ImageRecognition
280666acc0b34d905ff54fe2aaec1768a0a3d0e7
[ "Apache-2.0" ]
1
2018-06-24T01:33:50.000Z
2018-06-24T01:33:50.000Z
test/test_edit_api.py
Cloudmersive/Cloudmersive.APIClient.Python.ImageRecognition
280666acc0b34d905ff54fe2aaec1768a0a3d0e7
[ "Apache-2.0" ]
null
null
null
test/test_edit_api.py
Cloudmersive/Cloudmersive.APIClient.Python.ImageRecognition
280666acc0b34d905ff54fe2aaec1768a0a3d0e7
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 """ imageapi Image Recognition and Processing APIs let you use Machine Learning to recognize and process images, and also perform useful image modification operations. # noqa: E501 OpenAPI spec version: v1 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import unittest import cloudmersive_image_api_client from cloudmersive_image_api_client.api.edit_api import EditApi # noqa: E501 from cloudmersive_image_api_client.rest import ApiException class TestEditApi(unittest.TestCase): """EditApi unit test stubs""" def setUp(self): self.api = cloudmersive_image_api_client.api.edit_api.EditApi() # noqa: E501 def tearDown(self): pass def test_edit_composite_basic(self): """Test case for edit_composite_basic Composite two images together # noqa: E501 """ pass def test_edit_draw_rectangle(self): """Test case for edit_draw_rectangle Draw rectangle onto an image # noqa: E501 """ pass def test_edit_draw_text(self): """Test case for edit_draw_text Draw text onto an image # noqa: E501 """ pass if __name__ == '__main__': unittest.main()
23.125
172
0.685714
165
1,295
5.127273
0.448485
0.056738
0.094563
0.122931
0.306147
0.243499
0.148936
0
0
0
0
0.020346
0.240927
1,295
55
173
23.545455
0.840285
0.430116
0
0.222222
0
0
0.012559
0
0
0
0
0
0
1
0.277778
false
0.222222
0.277778
0
0.611111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
876816723e59820da15f88025d9b428f00d9e81c
9,374
py
Python
hybridbackend/tensorflow/data/parquet_dataset_ragged_test.py
alibaba/HybridBackend
498f74038fbc3be4ab1de6a8c3c2ef99f39af5e3
[ "Apache-2.0" ]
38
2021-12-01T06:54:36.000Z
2022-03-23T11:23:21.000Z
hybridbackend/tensorflow/data/parquet_dataset_ragged_test.py
alibaba/HybridBackend
498f74038fbc3be4ab1de6a8c3c2ef99f39af5e3
[ "Apache-2.0" ]
15
2021-12-01T09:15:26.000Z
2022-03-28T02:49:21.000Z
hybridbackend/tensorflow/data/parquet_dataset_ragged_test.py
alibaba/HybridBackend
498f74038fbc3be4ab1de6a8c3c2ef99f39af5e3
[ "Apache-2.0" ]
8
2021-12-02T01:16:14.000Z
2022-01-28T04:51:16.000Z
# Copyright 2021 Alibaba Group Holding Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= r'''Parquet batch dataset ragged tensors test. ''' from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import os import pandas as pd from six.moves import xrange # pylint: disable=redefined-builtin import tempfile import tensorflow as tf import hybridbackend.tensorflow as hb import hybridbackend.test as hbtest import unittest # pylint: disable=missing-docstring class ParquetDatasetRaggedTest(unittest.TestCase): def setUp(self): # pylint: disable=invalid-name os.environ['CUDA_VISIBLE_DEVICES'] = '' self._workspace = tempfile.mkdtemp() self._filename = os.path.join(self._workspace, 'ragged_test.parquet') num_cols = 3 self._df = pd.DataFrame( np.array([ [ np.random.randint( 0, 100, size=(np.random.randint(1, 5),), dtype=np.int64) for _ in xrange(num_cols)] for _ in xrange(100)], dtype=object), columns=[f'col{c}' for c in xrange(num_cols)]) self._df.to_parquet(self._filename) def tearDown(self): # pylint: disable=invalid-name os.remove(self._filename) del os.environ['CUDA_VISIBLE_DEVICES'] def test_read(self): batch_size = 32 with tf.Graph().as_default() as graph: ds = hb.data.ParquetDataset( [self._filename], batch_size=batch_size, fields=['col2', 'col0']) ds = ds.prefetch(4) batch = 
hb.data.make_one_shot_iterator(ds).get_next() c = self._df['col0'] with tf.Session(graph=graph) as sess: for i in xrange(3): result = sess.run(batch) start_row = i * batch_size end_row = (i + 1) * batch_size expected_items = c[start_row:end_row].to_numpy().tolist() expected_values = [] expected_splits = [0] for item in expected_items: expected_values.extend(item) expected_splits.append(expected_splits[-1] + len(item)) expected = hb.data.DataFrame.Value( np.array(expected_values), [np.array(expected_splits, dtype=np.int32)]) actual = result['col0'] np.testing.assert_allclose(actual.values, expected.values) np.testing.assert_equal( actual.nested_row_splits, expected.nested_row_splits) def test_to_sparse(self): batch_size = 32 with tf.Graph().as_default() as graph: ds = hb.data.ParquetDataset( [self._filename], batch_size=batch_size, fields=['col2', 'col0']) ds = ds.prefetch(4) batch = hb.data.make_one_shot_iterator(ds).get_next() batch = hb.data.DataFrame.to_sparse(batch) c = self._df['col0'] with tf.Session(graph=graph) as sess: for i in xrange(3): result = sess.run(batch) start_row = i * batch_size end_row = (i + 1) * batch_size expected_items = c[start_row:end_row].to_numpy().tolist() expected_values = [] expected_splits = [0] for item in expected_items: expected_values.extend(item) expected_splits.append(expected_splits[-1] + len(item)) expected = hb.data.DataFrame.Value( np.array(expected_values), [np.array(expected_splits, dtype=np.int32)]) actual = result['col0'] np.testing.assert_allclose(actual.values, expected.values) np.testing.assert_equal( len(set(list(zip(*actual.indices))[0])) + 1, len(expected.nested_row_splits[0])) def test_map_to_sparse(self): batch_size = 32 with tf.Graph().as_default() as graph: ds = hb.data.ParquetDataset( [self._filename], batch_size=batch_size, fields=['col2', 'col0']) ds = ds.map(hb.data.DataFrame.to_sparse) ds = ds.prefetch(4) batch = hb.data.make_one_shot_iterator(ds).get_next() c = self._df['col0'] with 
tf.Session(graph=graph) as sess: for i in xrange(3): result = sess.run(batch) start_row = i * batch_size end_row = (i + 1) * batch_size expected_items = c[start_row:end_row].to_numpy().tolist() expected_values = [] expected_splits = [0] for item in expected_items: expected_values.extend(item) expected_splits.append(expected_splits[-1] + len(item)) expected = hb.data.DataFrame.Value( np.array(expected_values), [np.array(expected_splits, dtype=np.int32)]) actual = result['col0'] np.testing.assert_allclose(actual.values, expected.values) np.testing.assert_equal( len(set(list(zip(*actual.indices))[0])) + 1, len(expected.nested_row_splits[0])) def test_apply_to_sparse(self): batch_size = 32 with tf.Graph().as_default() as graph: ds = hb.data.ParquetDataset( [self._filename], batch_size=batch_size, fields=['col2', 'col0']) ds = ds.apply(hb.data.to_sparse()) ds = ds.prefetch(4) batch = hb.data.make_one_shot_iterator(ds).get_next() c = self._df['col0'] with tf.Session(graph=graph) as sess: for i in xrange(3): result = sess.run(batch) start_row = i * batch_size end_row = (i + 1) * batch_size expected_items = c[start_row:end_row].to_numpy().tolist() expected_values = [] expected_splits = [0] for item in expected_items: expected_values.extend(item) expected_splits.append(expected_splits[-1] + len(item)) expected = hb.data.DataFrame.Value( np.array(expected_values), [np.array(expected_splits, dtype=np.int32)]) actual = result['col0'] np.testing.assert_equal(actual.values, expected.values) np.testing.assert_equal( len(set(list(zip(*actual.indices))[0])) + 1, len(expected.nested_row_splits[0])) def test_feedable_iterator(self): batch_size = 32 with tf.Graph().as_default() as graph: ds = hb.data.ParquetDataset( [self._filename], batch_size=batch_size, fields=['col2', 'col0']) ds = ds.apply(hb.data.to_sparse()) ds = ds.prefetch(4) it = hb.data.make_one_shot_iterator(ds) handle_tensor = it.string_handle() feedable_handle = tf.placeholder(tf.string, shape=[]) feedable_it = 
tf.data.Iterator.from_string_handle( feedable_handle, it.output_types, it.output_shapes, it.output_classes) batch = feedable_it.get_next() c = self._df['col0'] with tf.Session(graph=graph) as sess: handle_val = sess.run(handle_tensor) for i in xrange(3): result = sess.run(batch, feed_dict={feedable_handle: handle_val}) start_row = i * batch_size end_row = (i + 1) * batch_size expected_items = c[start_row:end_row].to_numpy().tolist() expected_values = [] expected_splits = [0] for item in expected_items: expected_values.extend(item) expected_splits.append(expected_splits[-1] + len(item)) expected = hb.data.DataFrame.Value( np.array(expected_values), [np.array(expected_splits, dtype=np.int32)]) actual = result['col0'] np.testing.assert_equal(actual.values, expected.values) np.testing.assert_equal( len(set(list(zip(*actual.indices))[0])) + 1, len(expected.nested_row_splits[0])) def test_read_and_map(self): batch_size = 32 with tf.Graph().as_default() as graph: ds = hb.data.ParquetDataset( [self._filename], batch_size=batch_size, fields=['col2', 'col0']) def _parse(values): return values['col0'], values['col2'] ds = ds.map(_parse) ds = ds.prefetch(4) batch = hb.data.make_one_shot_iterator(ds).get_next() c = self._df['col0'] with tf.Session(graph=graph) as sess: for i in xrange(3): result = sess.run(batch) start_row = i * batch_size end_row = (i + 1) * batch_size expected_items = c[start_row:end_row].to_numpy().tolist() expected_values = [] expected_splits = [0] for item in expected_items: expected_values.extend(item) expected_splits.append(expected_splits[-1] + len(item)) expected = hb.data.DataFrame.Value( np.array(expected_values), [np.array(expected_splits, dtype=np.int32)]) actual = result[0] np.testing.assert_allclose(actual.values, expected.values) np.testing.assert_equal( actual.nested_row_splits, expected.nested_row_splits) if __name__ == '__main__': hbtest.main(f'{__file__}.xml')
35.778626
79
0.631747
1,239
9,374
4.566586
0.170299
0.04772
0.031813
0.028279
0.724461
0.708554
0.69795
0.693178
0.693178
0.687699
0
0.015746
0.241199
9,374
261
80
35.915709
0.779699
0.089183
0
0.73991
0
0
0.021964
0
0
0
0
0
0.053812
1
0.040359
false
0
0.053812
0.004484
0.103139
0.004484
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
876a9361ce74a5296ca91aaa83a22ed2d50ec116
267
py
Python
input/stsci/ps_cone.py
JohnGood/servicemon
a320033904ee86c710248e68f12de7da7959853c
[ "BSD-3-Clause" ]
null
null
null
input/stsci/ps_cone.py
JohnGood/servicemon
a320033904ee86c710248e68f12de7da7959853c
[ "BSD-3-Clause" ]
null
null
null
input/stsci/ps_cone.py
JohnGood/servicemon
a320033904ee86c710248e68f12de7da7959853c
[ "BSD-3-Clause" ]
null
null
null
[ {'base_name': 'PanSTARRS', 'service_type': 'xcone', 'adql': '', 'access_url': 'https://catalogs.mast.stsci.edu/api/v0.1/panstarrs/dr2/' 'mean.votable?flatten_response=false&raw=false&sort_by=distance' '&ra={}&dec={}&radius={}' } ]
26.7
76
0.59176
32
267
4.78125
0.9375
0
0
0
0
0
0
0
0
0
0
0.013636
0.17603
267
9
77
29.666667
0.681818
0
0
0
0
0
0.707865
0.318352
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
876c718e9f374053a0be56fb0f09ceebbd0dbb6c
539
py
Python
lab_02/math_v1.py
idsdlab/basicai_sp21
af9acba34c0417fed830de1b61753c50fd303169
[ "MIT" ]
1
2021-03-23T16:18:00.000Z
2021-03-23T16:18:00.000Z
lab_02/math_v1.py
idsdlab/basicai_sp21
af9acba34c0417fed830de1b61753c50fd303169
[ "MIT" ]
null
null
null
lab_02/math_v1.py
idsdlab/basicai_sp21
af9acba34c0417fed830de1b61753c50fd303169
[ "MIT" ]
null
null
null
# sin(x) import math def derivative_of_sin(x): return math.cos(x) if __name__ == '__main__': x = 2.0 y = derivative_of_sin(x) print('x is ', x, 'derivative of sin(x): ', y) x = math.pi / 2 y = derivative_of_sin(x) print('x is ', x, 'derivative of sin(x): ', y) x = 0 y = derivative_of_sin(x) print('x is ', x, 'derivative of sin(x): ', y) while True: x = input('input x: ') x = float(x) y = derivative_of_sin(x) print('x is ', x, 'derivative of sin(x): ', y)
22.458333
54
0.534323
90
539
3
0.244444
0.148148
0.5
0.533333
0.651852
0.651852
0.651852
0.651852
0.651852
0.651852
0
0.010582
0.298701
539
24
54
22.458333
0.703704
0.011132
0
0.444444
0
0
0.234962
0
0
0
0
0
0
1
0.055556
false
0
0.055556
0.055556
0.166667
0.222222
0
0
0
null
0
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
5e522fd7cc95bdb82c4dbda39acc5ac1ec5a29bf
3,174
py
Python
tests/fixtures/digests/medium_content_99999.py
elifesciences/elife-bot
d3a102c8030e4b7ec83cbd45e5f839dba4f9ffd9
[ "MIT" ]
17
2015-02-10T07:10:29.000Z
2021-05-14T22:24:45.000Z
tests/fixtures/digests/medium_content_99999.py
elifesciences/elife-bot
d3a102c8030e4b7ec83cbd45e5f839dba4f9ffd9
[ "MIT" ]
459
2015-03-31T18:24:23.000Z
2022-03-30T19:44:40.000Z
tests/fixtures/digests/medium_content_99999.py
elifesciences/elife-bot
d3a102c8030e4b7ec83cbd45e5f839dba4f9ffd9
[ "MIT" ]
9
2015-04-18T16:57:31.000Z
2020-10-30T11:49:13.000Z
# coding=utf-8 from collections import OrderedDict EXPECTED = OrderedDict( [ ("title", u"Fishing for errors in the\xa0tests"), ("contentFormat", "html"), ( "content", u'<figure><img src="https://iiif.elifesciences.org/digests/99999%2Fdigest-99999.jpg/full/full/0/default.jpg" /><figcaption><b>It’s not just mammals who can recognise sample data.</b>\xa0Image credit:\xa0Anonymous and Anonymous\xa0(CC BY\xa04.0)</figcaption></figure><h1>Fishing for errors in the\xa0tests</h1><h2>Testing a document which mimics the format of a file we’ve used \xa0before plus CO<sub>2</sub> and Ca<sup>2+</sup>.</h2><hr/><p>Microbes live in us and on us. They are tremendously important for our health, but remain difficult to understand, since a microbial community typically consists of hundreds of species that interact in complex ways that we cannot fully characterize. It is tempting to ask whether one might instead characterize such a community as a whole, treating it as a multicellular "super-organism". However, taking this view beyond a metaphor is controversial, because the formal criteria of multicellularity require pervasive levels of cooperation between organisms that do not occur in most natural communities.</p><p>In nature, entire communities of microbes routinely come into contact – for example, kissing can mix together the communities in each person’s mouth. Can such events be usefully described as interactions between community-level "wholes", even when individual bacteria do not cooperate with each other? And can these questions be asked in a rigorous mathematical framework?</p><p>Mikhail Tikhonov has now developed a theoretical model that shows that communities of purely "selfish" members may effectively act together when competing with another community for resources. 
This model offers a new way to formalize the "super-organism" metaphor: although individual members compete against each other within a community, when seen from the outside the community interacts with its environment and with other communities much like a single organism.</p><p>This perspective blurs the distinction between two fundamental concepts: competition and genetic recombination. Competition combines two communities to produce a third where species are grouped in a new way, just as the genetic material of parents is recombined in their offspring.</p><p>Tikhonov’s model is highly simplified, but this suggests that the "cohesion" seen when viewing an entire community is a general consequence of ecological interactions. In addition, the model considers only competitive interactions, but in real life, species depend on each other; for example, one organism\'s waste is another\'s food. A natural next step would be to incorporate such cooperative interactions into a similar model, as cooperation is likely to make community cohesion even stronger.</p><hr/><p><em>Originally published at <a href="https://elifesciences.org/digests/99999">https://elifesciences.org/digests/99999</a>.</em></p>', ), ("tags", ["Face Recognition", "Neuroscience", "Vision"]), ("license", "cc-40-by"), ] )
186.705882
2,846
0.772842
474
3,174
5.177215
0.561181
0.00326
0.028117
0.03423
0.05053
0.023635
0
0
0
0
0
0.01482
0.149653
3,174
16
2,847
198.375
0.894035
0.003781
0
0
0
0.153846
0.925633
0.074684
0
0
0
0
0
1
0
false
0
0.153846
0
0.153846
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
5e58dbad8f548081ccf0ccbf5aeb2063834eb2c6
15
py
Python
bottery/conf/global_settings.py
Yusadolat/bottery
b239793709e68cc5036b6d2a31fc101190efba85
[ "MIT" ]
null
null
null
bottery/conf/global_settings.py
Yusadolat/bottery
b239793709e68cc5036b6d2a31fc101190efba85
[ "MIT" ]
null
null
null
bottery/conf/global_settings.py
Yusadolat/bottery
b239793709e68cc5036b6d2a31fc101190efba85
[ "MIT" ]
null
null
null
TEMPLATES = []
7.5
14
0.6
1
15
9
1
0
0
0
0
0
0
0
0
0
0
0
0.2
15
1
15
15
0.75
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
5e5d2cc610a0b8821a1d7981e70dad68e016ae2f
255
py
Python
ui_tests/exporter/pages/sites_page.py
django-doctor/lite-frontend
330ff9575fd22d7c4c42698ac2d653244e6180d6
[ "MIT" ]
1
2021-10-16T16:36:58.000Z
2021-10-16T16:36:58.000Z
ui_tests/exporter/pages/sites_page.py
django-doctor/lite-frontend
330ff9575fd22d7c4c42698ac2d653244e6180d6
[ "MIT" ]
45
2020-08-11T14:37:46.000Z
2022-03-29T17:03:02.000Z
ui_tests/exporter/pages/sites_page.py
django-doctor/lite-frontend
330ff9575fd22d7c4c42698ac2d653244e6180d6
[ "MIT" ]
3
2021-02-01T06:26:19.000Z
2022-02-21T23:02:46.000Z
from ui_tests.exporter.pages.BasePage import BasePage class SitesPage(BasePage): SITES_CHECKBOX = ".govuk-checkboxes__input" def click_sites_checkbox(self, no): self.driver.find_elements_by_css_selector(self.SITES_CHECKBOX)[no].click()
28.333333
82
0.776471
34
255
5.5
0.705882
0.208556
0
0
0
0
0
0
0
0
0
0
0.12549
255
8
83
31.875
0.838565
0
0
0
0
0
0.094118
0.094118
0
0
0
0
0
1
0.2
false
0
0.2
0
0.8
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
5e6019619bcae43680483c621f2acc3ce3130c31
159
py
Python
sj/utils.py
takluyver/sj
24dfc4938294eda70d5062a79d238542537a22f9
[ "MIT" ]
null
null
null
sj/utils.py
takluyver/sj
24dfc4938294eda70d5062a79d238542537a22f9
[ "MIT" ]
null
null
null
sj/utils.py
takluyver/sj
24dfc4938294eda70d5062a79d238542537a22f9
[ "MIT" ]
null
null
null
import os.path _home = os.path.expanduser('~') def compress_user(path): if path.startswith(_home): return '~' + path[len(_home):] return path
19.875
38
0.641509
21
159
4.666667
0.571429
0.122449
0.285714
0
0
0
0
0
0
0
0
0
0.201258
159
7
39
22.714286
0.771654
0
0
0
0
0
0.012579
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.666667
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
5e7252bd0dd005bd910347e2ace4387010f97dfb
277
py
Python
ticket/api/admin.py
neslisahgamze/seating
e51f2ef19e6378cc1aa74bcf4582023bf910d663
[ "MIT" ]
null
null
null
ticket/api/admin.py
neslisahgamze/seating
e51f2ef19e6378cc1aa74bcf4582023bf910d663
[ "MIT" ]
null
null
null
ticket/api/admin.py
neslisahgamze/seating
e51f2ef19e6378cc1aa74bcf4582023bf910d663
[ "MIT" ]
null
null
null
""" Admin """ from django.contrib import admin from .models import Ticket, Category, Seat, Section, Event # Register your models here. admin.site.register(Ticket) admin.site.register(Category) admin.site.register(Seat) admin.site.register(Section) admin.site.register(Event)
23.083333
58
0.779783
38
277
5.684211
0.394737
0.208333
0.393519
0
0
0
0
0
0
0
0
0
0.097473
277
11
59
25.181818
0.864
0.122744
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.285714
0
0.285714
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
5e96b600eef409e3c235e8fefde4d51746b0f6ea
7,073
py
Python
main.py
JJSrra/Research-GroupRecommendersForMovies
5ff66865192e648a3f1d280f7fea671e5ea7f09a
[ "MIT" ]
1
2019-11-24T17:19:58.000Z
2019-11-24T17:19:58.000Z
main.py
JJSrra/Research-GroupRecommendersForMovies
5ff66865192e648a3f1d280f7fea671e5ea7f09a
[ "MIT" ]
null
null
null
main.py
JJSrra/Research-GroupRecommendersForMovies
5ff66865192e648a3f1d280f7fea671e5ea7f09a
[ "MIT" ]
1
2019-11-24T17:20:15.000Z
2019-11-24T17:20:15.000Z
import numpy as np import pandas as pd import evaluation import similarity if __name__ == "__main__": array = np.array random_groups = pd.read_csv("generated_data/random_groups.csv", header=None).to_numpy() read_dict = open('generated_data/random_evaluated_movies.txt', 'r').read() random_evaluated_movies = eval(read_dict) avg_real_rankings = eval(open('generated_data/rankings/real_random_avg.txt', 'r').read()) min_real_rankings = eval(open('generated_data/rankings/real_random_min.txt', 'r').read()) max_real_rankings = eval(open('generated_data/rankings/real_random_max.txt', 'r').read()) maj_real_rankings = eval(open('generated_data/rankings/real_random_maj.txt', 'r').read()) real_random_rankings = {"avg": avg_real_rankings, "min": min_real_rankings, "max": max_real_rankings, "maj": maj_real_rankings} buddies_groups = pd.read_csv("generated_data/buddies_groups.csv", header=None).to_numpy() read_dict = open('generated_data/buddies_evaluated_movies.txt', 'r').read() buddies_evaluated_movies = eval(read_dict) avg_real_rankings = eval(open('generated_data/rankings/real_buddies_avg.txt', 'r').read()) min_real_rankings = eval(open('generated_data/rankings/real_buddies_min.txt', 'r').read()) max_real_rankings = eval(open('generated_data/rankings/real_buddies_max.txt', 'r').read()) maj_real_rankings = eval(open('generated_data/rankings/real_buddies_maj.txt', 'r').read()) real_buddies_rankings = {"avg": avg_real_rankings, "min": min_real_rankings, "max": max_real_rankings, "maj": maj_real_rankings} read_dict = open('generated_data/train_ratings.txt', 'r').read() train_ratings_by_user = eval(read_dict) read_dict = open('generated_data/test_ratings.txt', 'r').read() test_ratings_by_user = eval(read_dict) pearson = pd.read_csv("generated_data/pearson_correlation_matrix.csv", header=None).to_numpy() test_movies = pd.read_csv("generated_data/test_movies.csv", header=None).to_numpy().flatten() # ===================== BASELINE ===================== # Random 
predicted_random_baseline_rankings = evaluation.generate_baseline_predictions( random_groups, random_evaluated_movies, test_ratings_by_user, pearson, "generated_data/rankings/predicted_random_baseline_") evaluation.evaluate_predictions( predicted_random_baseline_rankings, real_random_rankings, "generated_data/baseline_random_ndcg.csv") # Buddies predicted_buddies_baseline_rankings = evaluation.generate_baseline_predictions( buddies_groups, buddies_evaluated_movies, test_ratings_by_user, pearson, "generated_data/rankings/predicted_buddies_baseline_") evaluation.evaluate_predictions( predicted_buddies_baseline_rankings, real_buddies_rankings, "generated_data/baseline_buddies_ndcg.csv") # ===================== POGRS ===================== # Random predicted_random_pogrs_rankings = evaluation.generate_pogrs_predictions( random_groups, random_evaluated_movies, test_ratings_by_user, pearson, "generated_data/rankings/predicted_random_pogrs.txt") evaluation.evaluate_predictions( predicted_random_pogrs_rankings, real_random_rankings, "generated_data/pogrs_random_ndcg.csv") # Buddies predicted_buddies_pogrs_rankings = evaluation.generate_pogrs_predictions( buddies_groups, buddies_evaluated_movies, test_ratings_by_user, pearson, "generated_data/rankings/predicted_buddies_pogrs.txt") evaluation.evaluate_predictions( predicted_buddies_pogrs_rankings, real_buddies_rankings, "generated_data/pogrs_buddies_ndcg.csv") # ===================== EMPATHY ===================== # Random predicted_random_empathy_rankings = evaluation.generate_empathy_predictions( random_groups, random_evaluated_movies, test_ratings_by_user, pearson, "generated_data/rankings/predicted_random_empathy.txt") evaluation.evaluate_predictions( predicted_random_empathy_rankings, real_random_rankings, "generated_data/empathy_random_ndcg.csv") # Buddies predicted_buddies_empathy_rankings = evaluation.generate_empathy_predictions( buddies_groups, buddies_evaluated_movies, test_ratings_by_user, pearson, 
"generated_data/rankings/predicted_buddies_empathy.txt") evaluation.evaluate_predictions( predicted_buddies_empathy_rankings, real_buddies_rankings, "generated_data/empathy_buddies_ndcg.csv") # ===================== CINEPHILE ===================== # Random predicted_random_cinephile_rankings = evaluation.generate_cinephile_predictions( random_groups, random_evaluated_movies, train_ratings_by_user, test_ratings_by_user, pearson, "generated_data/rankings/predicted_random_cinephile.txt") evaluation.evaluate_predictions( predicted_random_cinephile_rankings, real_random_rankings, "generated_data/cinephile_random_ndcg.csv") # Buddies predicted_buddies_cinephile_rankings = evaluation.generate_cinephile_predictions( buddies_groups, buddies_evaluated_movies, train_ratings_by_user, test_ratings_by_user, pearson, "generated_data/rankings/predicted_buddies_cinephile.txt") evaluation.evaluate_predictions( predicted_buddies_cinephile_rankings, real_buddies_rankings, "generated_data/cinephile_buddies_ndcg.csv") # ===================== OPTIMIST ===================== # Random predicted_random_optimist_rankings = evaluation.generate_optimist_predictions( random_groups, random_evaluated_movies, train_ratings_by_user, test_ratings_by_user, pearson, "generated_data/rankings/predicted_random_optimist.txt") evaluation.evaluate_predictions( predicted_random_optimist_rankings, real_random_rankings, "generated_data/optimist_random_ndcg.csv") # Buddies predicted_buddies_optimist_rankings = evaluation.generate_optimist_predictions( buddies_groups, buddies_evaluated_movies, train_ratings_by_user, test_ratings_by_user, pearson, "generated_data/rankings/predicted_buddies_optimist.txt") evaluation.evaluate_predictions( predicted_buddies_optimist_rankings, real_buddies_rankings, "generated_data/optimist_buddies_ndcg.csv") # ===================== SIMILARITY ===================== # Random predicted_random_similarity_rankings = evaluation.generate_similarity_predictions( random_groups, 
random_evaluated_movies, test_ratings_by_user, pearson, "generated_data/rankings/predicted_random_similarity.txt") evaluation.evaluate_predictions( predicted_random_similarity_rankings, real_random_rankings, "generated_data/similarity_random_ndcg.csv") # Buddies predicted_buddies_similarity_rankings = evaluation.generate_similarity_predictions( buddies_groups, buddies_evaluated_movies, test_ratings_by_user, pearson, "generated_data/rankings/predicted_buddies_similarity.txt") evaluation.evaluate_predictions( predicted_buddies_similarity_rankings, real_buddies_rankings, "generated_data/similarity_buddies_ndcg.csv")
53.583333
162
0.767284
817
7,073
6.145655
0.066095
0.103565
0.083649
0.044015
0.860386
0.79765
0.429397
0.429397
0.429397
0.41904
0
0
0.115934
7,073
131
163
53.992366
0.802975
0.057401
0
0.157895
1
0
0.268652
0.262034
0
0
0
0
0
1
0
false
0
0.052632
0
0.052632
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
5ea73af6824ea2a0e6a3c96297a9bb355f0309ec
240
py
Python
tools/update_terms_main.py
AutuanLiu/Kalman-Filter
1f4b567fdf9b6465d27ffcaba8da8404f3f80c62
[ "MIT" ]
8
2019-01-13T06:41:56.000Z
2021-11-15T09:00:46.000Z
tools/update_terms_main.py
AutuanLiu/Kalman-Filter
1f4b567fdf9b6465d27ffcaba8da8404f3f80c62
[ "MIT" ]
null
null
null
tools/update_terms_main.py
AutuanLiu/Kalman-Filter
1f4b567fdf9b6465d27ffcaba8da8404f3f80c62
[ "MIT" ]
3
2019-07-24T07:40:22.000Z
2021-05-03T12:37:18.000Z
"""重要工具包之工具使用 Copyright: ---------- Author: AutuanLiu Date: 2019/1/14 """ from core_tools import update_terms # !更新候选项 update_terms(data_root='data/', data_type_set={'linear', 'nonlinear', 'longlag_linear', 'longlag_nonlinear'})
18.461538
109
0.691667
29
240
5.448276
0.758621
0.139241
0
0
0
0
0
0
0
0
0
0.033333
0.125
240
12
110
20
0.719048
0.345833
0
0
0
0
0.34
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
0d6064cb2c5d5627c75aa1e498478a24622679ea
537
py
Python
F29.BioEntity/RunTests.py
foundation29org/F29.BioEntity
531947fb85465f363e63e268b9e3ca17283d76dd
[ "MIT" ]
null
null
null
F29.BioEntity/RunTests.py
foundation29org/F29.BioEntity
531947fb85465f363e63e268b9e3ca17283d76dd
[ "MIT" ]
null
null
null
F29.BioEntity/RunTests.py
foundation29org/F29.BioEntity
531947fb85465f363e63e268b9e3ca17283d76dd
[ "MIT" ]
null
null
null
import unittest from BioEntityTests import * USE_CACHE = True suite = unittest.TestSuite() suite.addTest(HPOGraphTests('en', use_cache=USE_CACHE)) suite.addTest(MondoGraphTests('en', use_cache=USE_CACHE)) suite.addTest(BioPhensTests('en', use_cache=USE_CACHE)) suite.addTest(BioGenesTests(use_cache=USE_CACHE)) suite.addTest(HPOGraphTests('es', use_cache=USE_CACHE)) suite.addTest(MondoGraphTests('es', use_cache=USE_CACHE)) suite.addTest(BioPhensTests('es', use_cache=USE_CACHE)) unittest.TextTestRunner(verbosity=2).run(suite)
25.571429
57
0.800745
72
537
5.763889
0.277778
0.289157
0.185542
0.26988
0.607229
0.563855
0.496386
0
0
0
0
0.001988
0.063315
537
20
58
26.85
0.823062
0
0
0
0
0
0.022388
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.166667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
0d7ee4518600b49e06e13d744afa57a8d4da5585
6,705
py
Python
.test/test-usecase-5.py
ItWillBeBetter/data-protection
6db0f4c7089fe110af8065583959d7dc13dba45e
[ "MIT-0" ]
93
2019-01-02T16:51:45.000Z
2022-02-13T07:29:36.000Z
.test/test-usecase-5.py
mukhendra/data-protection
6a4629855b181a4e5fbfb690b9b22203ff7e1b2e
[ "MIT-0" ]
2
2019-10-16T07:37:23.000Z
2021-06-07T10:38:37.000Z
.test/test-usecase-5.py
mukhendra/data-protection
6a4629855b181a4e5fbfb690b9b22203ff7e1b2e
[ "MIT-0" ]
54
2019-01-06T21:39:57.000Z
2022-02-25T09:01:05.000Z
import unittest import subprocess import boto3 import os import time from botocore.exceptions import ClientError # Note: beware of the limit on number of PCA certificates that can be generated! class TestUseCase5(unittest.TestCase): def setUp(self): self.ddb_client = boto3.client('dynamodb') self.acm_pca_client = boto3.client('acm-pca') self.ssm_client = boto3.client('ssm') self.s3_resource = boto3.resource('s3') self.cwd = os.path.dirname(os.path.realpath(__file__)) + '/../usecase-5' def test_step1(self): print("Test step 1") child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-1.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) time.sleep(5) try: self.ssm_client.get_parameter(Name='/dp-workshop/target_group_arn')['Parameter']['Value'] except ClientError as e: self.fail(msg='missing parameter /dp-workshop/target_group_arn') def test_step2(self): print("Test step 2") child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-2.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) time.sleep(5) try: self.ssm_client.get_parameter(Name='/dp-workshop/subordinate_pca_arn')['Parameter']['Value'] except ClientError as e: self.fail(msg='missing parameter /dp-workshop/subordinate_pca_arn') try: self.ssm_client.get_parameter(Name='/dp-workshop/subordinate_ca_serial_number')['Parameter']['Value'] except ClientError as e: self.fail(msg='missing parameter /dp-workshop/subordinate_ca_serial_number') try: crl_bucket_name = self.ssm_client.get_parameter(Name='/dp-workshop/crl_bucket_name')['Parameter']['Value'] try: self.s3_resource.meta.client.head_bucket(Bucket=crl_bucket_name) except ClientError as e: self.fail(msg='missing s3 bucket '+crl_bucket_name) except ClientError as e: self.fail(msg='missing parameter /dp-workshop/crl_bucket_name') def test_step3(self): print("Test step 3") child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-3.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) 
time.sleep(1) self.assertEqual(os.path.isfile(self.cwd+'/self-signed-cert.pem'), True ) try: self.ssm_client.get_parameter(Name='/dp-workshop/rootca_serial_number')['Parameter']['Value'] except ClientError as e: self.fail(msg='missing parameter /dp-workshop/rootca_serial_number') def test_step4(self): print("Test step 4") child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-4.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) time.sleep(1) self.assertEqual(os.path.isfile(self.cwd+'/signed_subordinate_ca_cert.pem'), True ) def test_step5(self): print("Test step 5") child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-5.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) time.sleep(1) subordinate_pca_arn = self.ssm_client.get_parameter(Name='/dp-workshop/subordinate_pca_arn')['Parameter']['Value'] response = self.acm_pca_client.describe_certificate_authority( CertificateAuthorityArn=subordinate_pca_arn ) self.assertEqual(response['CertificateAuthority']['Status'],'ACTIVE') def test_step6(self): print("Test step 6") child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-6.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) time.sleep(1) self.assertEqual(os.path.isfile(self.cwd+'/cert_chain.pem'), True ) def test_step7(self): print("Test step 7") child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-7.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) def test_step8(self): print("Test step 8") child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-8.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) def test_step9(self): print("Test step 9") crl_bucket_name = None try: crl_bucket_name = self.ssm_client.get_parameter(Name='/dp-workshop/crl_bucket_name')['Parameter']['Value'] except ClientError as e: self.fail(msg='missing parameter /dp-workshop/crl_bucket_name') subordinate_pca_arn = 
self.ssm_client.get_parameter(Name='/dp-workshop/subordinate_pca_arn')['Parameter']['Value'] child = subprocess.Popen(['python', self.cwd+'/usecase-5-step-9-cleanup.py']) output = child.communicate()[0] self.assertEqual(child.returncode, 0) time.sleep(5) # validate s3 bucket removed if crl_bucket_name is not None: try: self.s3_resource.meta.client.head_bucket(Bucket=crl_bucket_name) self.fail(msg='missing s3 bucket: '+crl_bucket_name) except ClientError as e: print "bucket removed: "+crl_bucket_name # validate files removed self.assertEqual(os.path.isfile(self.cwd+'/self-signed-cert.pem'), False ) self.assertEqual(os.path.isfile(self.cwd+'/signed_subordinate_ca_cert.pem'), False ) self.assertEqual(os.path.isfile(self.cwd+'/cert_chain.pem'), False ) # validate parameters removed params = ['/dp-workshop/listener_arn', '/dp-workshop/private_cert_arn', '/dp-workshop/target_group_arn', '/dp-workshop/subordinate_pca_arn', '/dp-workshop/rootca_serial_number', '/dp-workshop/subordinate_ca_serial_number', '/dp-workshop/crl_bucket_name' ] for param in params: try: self.ssm_client.get_parameter(Name=param) self.fail("parameter not removed: "+param) except ClientError as e: print "Validating parameter deleted: "+param # validate subordinate pca removed response = self.acm_pca_client.describe_certificate_authority( CertificateAuthorityArn=subordinate_pca_arn ) self.assertEqual(response['CertificateAuthority']['Status'],'DELETED') if __name__ == '__main__': unittest.main()
43.258065
122
0.631767
812
6,705
5.051724
0.160099
0.051195
0.044369
0.037299
0.757192
0.711604
0.699171
0.69137
0.69137
0.582399
0
0.014717
0.23997
6,705
154
123
43.538961
0.790228
0.028188
0
0.416667
1
0
0.241167
0.12212
0
0
0
0
0.128788
0
null
null
0
0.045455
null
null
0.083333
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
0d8065ff4b5a1e13452a4d6db853c06988c8a31f
1,063
py
Python
swifpy/dictionary.py
koher/kollect
1e77541d22e8dac753fdcc5d0ac1f745c771fc89
[ "MIT" ]
1
2017-02-17T03:29:43.000Z
2017-02-17T03:29:43.000Z
swifpy/dictionary.py
koher/kollect
1e77541d22e8dac753fdcc5d0ac1f745c771fc89
[ "MIT" ]
null
null
null
swifpy/dictionary.py
koher/kollect
1e77541d22e8dac753fdcc5d0ac1f745c771fc89
[ "MIT" ]
null
null
null
import typing as tp import builtins as py from .optional import Optional, optional K = tp.TypeVar('K') V = tp.TypeVar('V') class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]): def __init__(self, entries: tp.Dict[K, V]) -> None: self._entries: tp.Dict[K, V] = py.dict(entries) def __getitem__(self, key: K) -> Optional[V]: return optional(self._entries.get(key)) def __setitem__(self, key: K, value: V) -> None: self._entries[key] = value @property def keys(self) -> tp.Iterable[K]: return self._entries.keys() @property def values(self) -> tp.Iterable[V]: return self._entries.values() def for_each(self, body: tp.Callable[[K, V], None]) -> None: for key, value in self._entries.items(): body(key, value) @property def count(self) -> int: return len(self._entries) def remove_all(self) -> None: self._entries.clear() def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]: return self._entries.items().__iter__()
26.575
64
0.614299
150
1,063
4.146667
0.3
0.176849
0.072347
0.028939
0.061093
0.061093
0
0
0
0
0
0
0.231421
1,063
39
65
27.25641
0.761322
0
0
0.107143
0
0
0.001881
0
0
0
0
0
0
1
0.321429
false
0
0.107143
0.178571
0.642857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
0d8540287377b95878451a908cc20410bcd2406e
293
py
Python
app/__init__.py
kelvzxu/Biometric_machine_connector
2d21823d4d30db8c2e21d649f8dec6bc6d6b7d96
[ "MIT" ]
null
null
null
app/__init__.py
kelvzxu/Biometric_machine_connector
2d21823d4d30db8c2e21d649f8dec6bc6d6b7d96
[ "MIT" ]
null
null
null
app/__init__.py
kelvzxu/Biometric_machine_connector
2d21823d4d30db8c2e21d649f8dec6bc6d6b7d96
[ "MIT" ]
null
null
null
from flask import Flask import logging app = Flask(__name__) from . import attendance from . import controller from . import user from . import base if __name__ == "__main__": logging.basicConfig(filename='error.log',level=logging.DEBUG) app.run(host='0.0.0.0', port=5000)
22.538462
65
0.709898
41
293
4.780488
0.560976
0.204082
0.030612
0
0
0
0
0
0
0
0
0.033195
0.177474
293
13
66
22.538462
0.780083
0
0
0
0
0
0.085106
0
0
0
0
0
0
1
0
false
0
0.6
0
0.6
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
0d8cb5dd1121a6a3b914e4f84f48c99c33756d17
466
py
Python
backend/app/models/meme.py
jnana-cetana/XMeme
cb7d5e31c455dc3c7e751dff9e7c8e067090936b
[ "MIT" ]
19
2021-02-15T19:55:25.000Z
2022-02-01T09:05:07.000Z
backend/app/models/meme.py
jnana-cetana/XMeme
cb7d5e31c455dc3c7e751dff9e7c8e067090936b
[ "MIT" ]
null
null
null
backend/app/models/meme.py
jnana-cetana/XMeme
cb7d5e31c455dc3c7e751dff9e7c8e067090936b
[ "MIT" ]
null
null
null
from sqlalchemy import Column, DateTime, Index, Integer, String from sqlalchemy.sql import func from app.db.base_class import Base class Meme(Base): id = Column(Integer, primary_key=True) name = Column(String, nullable=False) url = Column(String, nullable=False) caption = Column(String, nullable=False) created_at = Column(DateTime, server_default=func.now()) __table_args__ = ( Index('created_at_idx', created_at.desc()), )
27.411765
63
0.712446
61
466
5.245902
0.540984
0.1125
0.1875
0.234375
0
0
0
0
0
0
0
0
0.180258
466
16
64
29.125
0.837696
0
0
0
0
0
0.030043
0
0
0
0
0
0
1
0
false
0
0.25
0
0.833333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
0d9eddf41da9c980c042e11a76962d009a925c7f
102
py
Python
PyGamesScripts/Sudoku/main.py
tanvi355/Awesome_Python_Scripts
dd906dff3c311e260851f8720cbce77d7398be43
[ "MIT" ]
3
2021-06-22T07:12:31.000Z
2022-02-27T18:40:16.000Z
PyGamesScripts/Sudoku/main.py
tanvi355/Awesome_Python_Scripts
dd906dff3c311e260851f8720cbce77d7398be43
[ "MIT" ]
1
2021-07-02T15:36:02.000Z
2021-07-02T15:37:25.000Z
PyGamesScripts/Sudoku/main.py
tanvi355/Awesome_Python_Scripts
dd906dff3c311e260851f8720cbce77d7398be43
[ "MIT" ]
2
2021-10-03T16:22:08.000Z
2021-10-03T17:35:14.000Z
from app_class import * # The main driver code if __name__=="__main__": app = App() app.run()
12.75
24
0.647059
15
102
3.8
0.733333
0.210526
0
0
0
0
0
0
0
0
0
0
0.22549
102
7
25
14.571429
0.721519
0.196078
0
0
0
0
0.1
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
0de2565984e10cb4bf0a07ad6ba2fa3958be3a23
592
py
Python
tests/util/module_for_assert_has_decorator.py
emartech/ems-dataflow-testframework
c70b0768573e9c4af98173bb0b444dee442de53a
[ "MIT" ]
null
null
null
tests/util/module_for_assert_has_decorator.py
emartech/ems-dataflow-testframework
c70b0768573e9c4af98173bb0b444dee442de53a
[ "MIT" ]
null
null
null
tests/util/module_for_assert_has_decorator.py
emartech/ems-dataflow-testframework
c70b0768573e9c4af98173bb0b444dee442de53a
[ "MIT" ]
1
2022-02-17T19:56:44.000Z
2022-02-17T19:56:44.000Z
from tests.util.dummy_decorator import dummydec class DecoratorDummy: @dummydec(103, stop_max_delay=10000, wait_fixed=1000) def method_with_decorator(self): pass @staticmethod @dummydec(multiple_by=2) def return_one(): return 1 def method_without_decorator(self): pass @dummydec(stop_max_delay=10000, wait_fixed=1000) def method_with_decorator_2(self): pass @dummydec(stop_max_delay=10001) def method_with_decorator_3(self): pass @staticmethod def method_with_different_decorator(): pass
20.413793
57
0.692568
74
592
5.216216
0.445946
0.11658
0.134715
0.170984
0.38342
0.38342
0.26943
0.26943
0.26943
0.26943
0
0.066079
0.233108
592
28
58
21.142857
0.784141
0
0
0.35
0
0
0
0
0
0
0
0
0
1
0.3
false
0.25
0.05
0.05
0.45
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
3
0df3bc3866506cba140bf15a4e7a7226f8c88e21
6,470
py
Python
OCAES/OCAES_rules.py
EnergyModels/OCAES
d848d9fa621767e036824110de87450d524b7687
[ "MIT" ]
null
null
null
OCAES/OCAES_rules.py
EnergyModels/OCAES
d848d9fa621767e036824110de87450d524b7687
[ "MIT" ]
null
null
null
OCAES/OCAES_rules.py
EnergyModels/OCAES
d848d9fa621767e036824110de87450d524b7687
[ "MIT" ]
null
null
null
from pyomo.environ import * def pwr_wind(model, t): return model.P_wind[t] == model.X_wind * model.P_wind_fr[t] # ---------------- # Variable capacities - X_wind and X_storage are optimized variables # ---------------- def capacity_well_var(model): return model.X_well == model.X_storage def capacity_cmp_var(model): return model.X_cmp == model.X_storage def capacity_exp_var(model): return model.X_exp == model.X_storage # ---------------- # capacity constraints # ---------------- # power def pwr_capacity_cmp(model, t): return model.P_cmp[t] <= model.X_cmp def pwr_capacity_exp(model, t): return model.P_exp[t] <= model.X_exp def pwr_capacity_well_in(model, t): return model.P_cmp[t] <= model.X_well def pwr_capacity_well_out(model, t): return model.P_exp[t] <= model.X_well def pwr_grid_sell(model, t): return model.P_grid_sell[t] <= model.X_wind def pwr_grid_limit(model, t): return model.P_grid_sell[t] + model.P_grid_buy[t] <= model.X_wind def pwr_grid_buy_enabled(model, t): return model.P_grid_buy[t] <= model.X_wind def pwr_grid_buy_disabled(model, t): return model.P_grid_buy[t] <= 0.0 # turned off arbitrage def pwr_dispatch_const(model, t): return model.P_grid_sell[t] == model.X_dispatch # energy def energy_capacity_well_min(model, t): return model.E_well_min_fr * model.E_well_duration * model.X_well <= model.E_well[t] def energy_capacity_well_max(model, t): return model.E_well[t] <= model.E_well_max_fr * model.E_well_duration * model.X_well # ---------------- # power balance # ---------------- def power_balance(model, t): return model.P_wind[t] + model.P_exp[t] + model.P_grid_buy[t] == model.P_curtail[t] + model.P_grid_sell[t] + \ model.P_cmp[t] # ---------------- # energy stored # ---------------- def energy_stored_init(model): return model.E_well_init == value(model.X_well) * model.E_well_init_fr * model.E_well_duration def energy_stored(model, t): if t == 1: return model.E_well[t] == model.E_well_init else: return model.E_well[t] == model.E_well[t - 1] + \ 
model.delta_t * model.P_cmp[t] * model.eta_storage_single - \ model.delta_t * model.P_exp[t] / model.eta_storage_single def energy_stored_final(model): return model.E_well[model.T - 1] == model.E_well_init # ---------------- # avoided emissions # ---------------- def emissions(model): return model.avoided_emissions == model.delta_t * sum( model.P_grid_sell[t] * model.emissions_grid[t] for t in model.t) - \ model.delta_t * sum(model.P_grid_buy[t] * model.emissions_grid[t] for t in model.t) # ---------------- # electricity # ---------------- def total_electricity(model): return model.total_electricity == sum(model.P_wind[t] for t in model.t) def yearly_electricity(model): return model.yearly_electricity == sum(model.P_grid_sell[t] for t in model.t) * 8760 / (model.T * model.delta_t) def yearly_electricity_generated(model): return model.yearly_electricity_generated == sum(model.P_wind[t] for t in model.t) * 8760 / ( model.T * model.delta_t) def yearly_electricity_purchased(model): return model.yearly_electricity_purchased == sum(model.P_grid_buy[t] for t in model.t) * 8760 / ( model.T * model.delta_t) def yearly_curtailment(model): return model.yearly_curtailment == sum(model.P_curtail[t] for t in model.t) * 8760 / (model.T * model.delta_t) def yearly_exp_usage(model): return model.yearly_exp_usage == sum(model.P_exp[t] for t in model.t) * 8760 / (model.T * model.delta_t) def yearly_cmp_usage(model): return model.yearly_cmp_usage == sum(model.P_cmp[t] for t in model.t) * 8760 / (model.T * model.delta_t) # ---------------- # economics # ---------------- def electricity_revenue(model, t): return model.electricity_revenue[t] == (model.P_grid_sell[t] - model.P_grid_buy[t]) * \ model.delta_t * model.price_grid[t] def yearly_electricity_revenue(model): return model.yearly_electricity_revenue == model.delta_t * sum( (model.P_grid_sell[t] - model.P_grid_buy[t]) * model.price_grid[t] for t in model.t) * 8760 / ( model.T * model.delta_t) def yearly_capacity_credit(model): 
return model.yearly_capacity_credit == model.CC_value * 365 * min(model.X_wind, model.X_wind * model.CC_wind + model.X_exp * model.CC_exp) def yearly_capacity_credit_simple(model): return model.yearly_capacity_credit == model.CC_value * 365 * ( model.X_wind * model.CC_wind + model.X_exp * model.CC_exp) def yearly_total_revenue(model): return model.yearly_total_revenue == model.yearly_electricity_revenue + model.yearly_capacity_credit def yearly_costs(model): # capital costs = capacity * annual costs capital = model.CRF_wind * model.X_wind * model.C_wind + \ model.CRF_well * model.X_well * model.C_well + \ model.CRF_cmp * model.X_cmp * model.C_cmp + \ model.CRF_exp * model.X_exp * model.C_exp # fixed costs fixed = model.X_wind * model.F_wind + model.X_well * model.F_well + model.X_cmp * model.F_cmp + model.X_exp * model.F_exp # variable costs variable = model.V_wind * model.delta_t * sum(model.P_wind[t] for t in model.t) + model.V_cmp * model.delta_t * sum( model.P_cmp[t] for t in model.t) + model.V_exp * model.delta_t * sum(model.P_exp[t] for t in model.t) variable = variable * 8760 / (model.T * model.delta_t) # scale to one year return model.yearly_costs == capital + fixed + variable def yearly_profit(model): return model.yearly_profit == model.yearly_total_revenue - model.yearly_costs # ---------------- # value of electricity (denominator of COVE) # ---------------- def yearly_electricity_value(model): return model.yearly_electricity_value == model.delta_t * sum( model.P_grid_sell[t] * model.price_grid[t] for t in model.t) / model.price_grid_average * 8760 / ( model.T * model.delta_t) # ---------------- # objective # ---------------- def objective_revenue(model): return model.yearly_electricity_revenue def objective_COVE(model): return model.yearly_electricity_value def objective_PROFIT(model): return model.yearly_profit def objective_COST(model): return model.yearly_costs
29.409091
128
0.659505
984
6,470
4.057927
0.097561
0.066116
0.092161
0.088154
0.624593
0.505635
0.4002
0.363386
0.293263
0.262209
0
0.008949
0.188253
6,470
219
129
29.543379
0.751333
0.097836
0
0.057692
0
0
0
0
0
0
0
0
0
1
0.375
false
0
0.009615
0.355769
0.769231
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
21b45d0d9aff34a1e81f93915016e44c46a400c5
724
py
Python
2019/day_04/python/day04.py
josephroquedev/advent-of-code
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
[ "MIT" ]
null
null
null
2019/day_04/python/day04.py
josephroquedev/advent-of-code
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
[ "MIT" ]
2
2021-06-02T00:41:38.000Z
2021-11-30T10:05:29.000Z
2019/day_04/python/day04.py
autoreleasefool/advent-of-code
bb217deb7a5f5ed5c8c04cb726ddadb5b042ee4d
[ "MIT" ]
null
null
null
from aoc import AOC, chunk aoc = AOC(year=2019, day=4) password_range = range(256310, 732736 + 1) # Part 1 def has_pair(p): return any(x == y for x, y in zip(p[:-1], p[1:])) def never_decreases(p): return all(int(y) >= int(x) for x, y in zip(p[:-1], p[1:])) def is_valid(p): return has_pair(str(p)) and never_decreases(str(p)) valid_passwords = sum(1 for p in password_range if is_valid(p)) aoc.p1(valid_passwords) # Part 2 def has_pair(p): p = [None, None] + list(p) + [None, None] return any( x == y and x != w and y != z for w, x, y, z in zip(p[:-3], p[1:-2], p[2:-1], p[3:]) ) valid_passwords = sum(1 for p in password_range if is_valid(p)) aoc.p2(valid_passwords)
19.567568
63
0.603591
143
724
2.951049
0.307692
0.023697
0.042654
0.052133
0.317536
0.317536
0.317536
0.317536
0.317536
0.317536
0
0.060391
0.222376
724
36
64
20.111111
0.689165
0.017956
0
0.210526
0
0
0
0
0
0
0
0
0
1
0.210526
false
0.263158
0.052632
0.157895
0.473684
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
0
0
0
3
21b7ccf14041bc20009777d2f302a84cb3f7a469
146
py
Python
t20.py
tarunagarwal99/python
13dbf4c4226430a3436d91d60f234827a05a6e0e
[ "MIT" ]
null
null
null
t20.py
tarunagarwal99/python
13dbf4c4226430a3436d91d60f234827a05a6e0e
[ "MIT" ]
null
null
null
t20.py
tarunagarwal99/python
13dbf4c4226430a3436d91d60f234827a05a6e0e
[ "MIT" ]
null
null
null
a=int(input("enter first number:")) b=int(input("enter second number:")) sum=0 for i in range (a,b+1): sum=sum+i print(sum)
20.857143
37
0.575342
26
146
3.230769
0.615385
0.190476
0.309524
0
0
0
0
0
0
0
0
0.018182
0.246575
146
6
38
24.333333
0.745455
0
0
0
0
0
0.278571
0
0
0
0
0
0
1
0
false
0
0
0
0
0.166667
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
21ba91bfcfbcd8bf7711f61ba60acdb9aff1a4ec
549
py
Python
bad_pickle.py
xvitkov/pv080_test.py
63fd45ac01217d005a99ee123129a780bc35d825
[ "MIT" ]
null
null
null
bad_pickle.py
xvitkov/pv080_test.py
63fd45ac01217d005a99ee123129a780bc35d825
[ "MIT" ]
null
null
null
bad_pickle.py
xvitkov/pv080_test.py
63fd45ac01217d005a99ee123129a780bc35d825
[ "MIT" ]
null
null
null
# contains bunch of buggy examples # taken from https://hackernoon.com/10-common-security-gotchas-in-python # -and-how-to-avoid-them-e19fbe265e03 def transcode_file(filename): """Input injection""" command = 'ffmpeg -i "{source}" output_file.mpg'.format(source=filename) return command # a bad idea! def access_function(user): """Assert statements""" assert user.is_admin, 'user does not have access' # secure code... class RunBinSh(): """Pickles""" def __reduce__(self): return
24.954545
77
0.653916
67
549
5.238806
0.835821
0
0
0
0
0
0
0
0
0
0
0.020882
0.214936
549
21
78
26.142857
0.793503
0.380692
0
0
0
0
0.204013
0
0
0
0
0
0.125
1
0.375
false
0
0
0.125
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
21ca6e75553bcbfe8b235a7ece1bd934c9f2a013
1,157
py
Python
server/migrations/0006_auto_20201107_1031.py
valdergallo/dbuddy
19de984abcdf8d0a512f725be1aa9abef67b5c6f
[ "MIT" ]
2
2020-11-07T11:51:20.000Z
2021-08-16T11:19:20.000Z
server/migrations/0006_auto_20201107_1031.py
valdergallo/dbuddy
19de984abcdf8d0a512f725be1aa9abef67b5c6f
[ "MIT" ]
7
2020-11-07T11:26:36.000Z
2021-08-21T12:21:45.000Z
server/migrations/0006_auto_20201107_1031.py
valdergallo/dbuddy
19de984abcdf8d0a512f725be1aa9abef67b5c6f
[ "MIT" ]
2
2021-08-17T21:18:34.000Z
2021-08-21T16:42:46.000Z
# Generated by Django 3.1.3 on 2020-11-07 10:31 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('server', '0005_auto_20201107_1029'), ] operations = [ migrations.AlterField( model_name='server', name='databases', field=models.ManyToManyField(blank=True, null=True, to='server.Database'), ), migrations.AlterField( model_name='server', name='host', field=models.CharField(default='localhost', max_length=250), ), migrations.AlterField( model_name='server', name='ssh_key', field=models.FileField(blank=True, null=True, upload_to='ssh_keys'), ), migrations.AlterField( model_name='server', name='ssh_key_pass', field=models.CharField(blank=True, max_length=250, null=True), ), migrations.AlterField( model_name='servergroup', name='servers', field=models.ManyToManyField(blank=True, null=True, to='server.Server'), ), ]
29.666667
86
0.574762
116
1,157
5.603448
0.439655
0.153846
0.192308
0.223077
0.415385
0.415385
0.295385
0.295385
0.156923
0
0
0.045735
0.300778
1,157
38
87
30.447368
0.757726
0.038894
0
0.4375
1
0
0.133333
0.020721
0
0
0
0
0
1
0
false
0.03125
0.03125
0
0.125
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
21d867c7cdac703a2de9cf0371f70433dff33583
46
py
Python
python/testData/inspections/PyUnboundLocalVariableInspection/WhileOneBreak.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/inspections/PyUnboundLocalVariableInspection/WhileOneBreak.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/inspections/PyUnboundLocalVariableInspection/WhileOneBreak.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
while 1: a = 1 break print(a) # pass
9.2
16
0.5
8
46
2.875
0.75
0
0
0
0
0
0
0
0
0
0
0.071429
0.391304
46
4
17
11.5
0.75
0.086957
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
21ef4e538eea810b1056d007defd55429f80961a
8,650
py
Python
board.py
roukaour/sudoku
f405f627b49b7400c8b41c36194d99ef4b6b179e
[ "MIT" ]
7
2015-09-30T19:09:29.000Z
2022-03-26T05:51:18.000Z
board.py
roukaour/sudoku
f405f627b49b7400c8b41c36194d99ef4b6b179e
[ "MIT" ]
1
2018-07-10T18:31:45.000Z
2018-07-11T01:32:11.000Z
board.py
roukaour/sudoku
f405f627b49b7400c8b41c36194d99ef4b6b179e
[ "MIT" ]
2
2019-06-26T11:37:31.000Z
2020-06-27T07:55:08.000Z
from __future__ import print_function from utils import * from color import * from cell import * from collections import namedtuple from itertools import product from functools import wraps Strategy = namedtuple('Strategy', ('name', 'function')) class Sudoku(object): """A 9x9 Sudoku board.""" UNIT_TYPES = ['row', 'column', 'block'] # A dictionary of solution strategies, keyed by their increasing difficulty strategies = {0: Strategy('nothing', lambda sudoku, verbose: False)} @classmethod def strategy(cls, name, difficulty): """Decorate a strategy function to register it for use in the solve method.""" def decorator(function): @wraps(function) def wrapper(sudoku, verbose): if sudoku.solved(): return False if verbose: print('Try', name) changed = function(sudoku, verbose) if verbose and not changed: print('...No', name, 'found') return changed cls.strategies[difficulty] = Strategy(name, wrapper) return wrapper return decorator def __init__(self, *cells): if len(cells) == 1: cells = cells[0] if isinstance(cells, str): cells = filter(None, (c if c in map(str, Cell.VALUES) else '0' if c in '0._*' else '' for c in cells)) cells = list(cells) if len(cells) == 9: cells = flatten(map(list, cells)) if len(cells) != 81: raise ValueError('Invalid Sudoku board: %r' % (cells,)) self.cm = [] while cells: row, cells = cells[:9], cells[9:] row = [Cell(i, len(self.cm), d) for i, d in enumerate(row)] self.cm.append(row) def __repr__(self): return 'Sudoku(%r)' % self.code_str() def __str__(self): return self.terse_str() if self.solved() else self.verbose_str() def code_str(self): return ''.join(str(c.value()) for c in self.cells()) def terse_str(self): return ''' 1 2 3 4 5 6 7 8 9 +-------+-------+-------+ A | %s %s %s | %s %s %s | %s %s %s | B | %s %s %s | %s %s %s | %s %s %s | C | %s %s %s | %s %s %s | %s %s %s | +-------+-------+-------+ D | %s %s %s | %s %s %s | %s %s %s | E | %s %s %s | %s %s %s | %s %s %s | F | %s %s %s | %s %s %s | %s %s %s | +-------+-------+-------+ G | %s %s 
%s | %s %s %s | %s %s %s | H | %s %s %s | %s %s %s | %s %s %s | J | %s %s %s | %s %s %s | %s %s %s | +-------+-------+-------+''' % tuple(c.value() for c in self.cells()) def verbose_str(self): s = flatten(list(' (%d) ' % c.value()) if c.solved() else [c.dcs.get(d, Color.NEITHER).colored(d) if d in c.ds else '.' for d in Cell.VALUES] for c in self.cells()) return (''' 1 2 3 4 5 6 7 8 9 +-------------+-------------+-------------+ | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | A | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | | | | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | B | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | | | | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | C | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | +-------------+-------------+-------------+ | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | D | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | | | | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | E | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | | | | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | F | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | +-------------+-------------+-------------+ | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | G | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s 
%s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | | | | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | H | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | | | | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | J | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | %s%s%s %s%s%s %s%s%s | +-------------+-------------+-------------+''' % tuple(flatten(flatten(flatten(transpose(chunk(chunk(h, 3), 3)) for h in chunk(s, 81)))))) def verify(self): verified = True values = set(Cell.VALUES) for i in range(9): verified &= union(c.ds for c in self.row(i)) == values verified &= union(c.ds for c in self.col(i)) == values verified &= union(c.ds for c in self.block(i)) == values for y, x in product(range(9), range(9)): cell = self.cell(x, y) verified &= 1 <= len(cell.ds) <= 9 and cell.ds.issubset(values) if not verified: raise RuntimeError('Sudoku board is in an invalid state') def copy(self): return Sudoku(*self.cm) def cells(self): return flatten(self.cm) def row(self, y): return self.cm[y] def row_without(self, x, y): return self.cm[y][:x] + self.cm[y][x+1:] def col(self, x): return [row[x] for row in self.cm] def col_without(self, x, y): return [row[x] for i, row in enumerate(self.cm) if i != y] def block(self, i): y, x = divmod(i, 3) return list(flatten(self.cm[3*y+i][3*x:3*x+3] for i in range(3))) def block_without(self, x, y): bx3, by3 = x // 3 * 3, y // 3 * 3 return list(self.cm[by3+i][bx3+j] for i in range(3) for j in range(3) if by3+i != y or bx3+j != x) def unit(self, unit_type, i): units = { 'row': self.row, 'column': self.col, 'block': self.block} return units[unit_type](i) def unit_without(self, unit_type, x, y): units_without = { 'row': self.row_without, 'column': self.col_without, 'block': self.block_without} return units_without[unit_type](x, y) 
def seen_from(self, x, y): row = self.row_without(x, y) col = self.col_without(x, y) block = self.block_without(x, y) return set(row + col + block) def cell_block(self, x, y): return self.block(y // 3 * 3 + x // 3) def cell(self, x, y): return self.cm[y][x] def row_name(self, y): return Cell.ROWS[y] def col_name(self, x): return Cell.COLS[x] def block_name(self, i): return Cell.BLOCKS[i] def unit_name(self, unit_type, i): unit_names = { 'row': self.row_name, 'column': self.col_name, 'block': self.block_name} return unit_names[unit_type](i) def solved(self): return all(c.solved() for c in self.cells()) def num_solved(self): return len([c for c in self.cells() if c.solved()]) def solve(self, max_difficulty=None, exclude=None, include_only=None, verbose=False): """Try to solve any unsolved cells with all registered strategies.""" if verbose: print(self.terse_str()) if verbose: print('Solving:', self.code_str()) num_solved = self.num_solved() difficulty = 0 last_difficulty = -1 while last_difficulty: last_difficulty = self._solve_strategies(max_difficulty, exclude, include_only, verbose) difficulty = max(difficulty, last_difficulty) if verbose: print('Completely solved!' if self.solved() else '...Cannot solve further', '(solved %d cells)' % (self.num_solved() - num_solved)) print('Most advanced strategy used:', self.strategies[difficulty].name) print('Solved:', self.code_str()) print(self) return self.strategies[difficulty].name def _solve_strategies(self, max_difficulty=None, exclude=None, include_only=None, verbose=False): """Try all registered strategies in order of increasing difficulty.""" if self.solved(): return 0 for difficulty, strategy in sorted(self.strategies.items()): if ((max_difficulty is not None and difficulty > max_difficulty) or (exclude is not None and difficulty in exclude) or (include_only is not None and difficulty not in include_only)): continue if strategy.function(self, verbose): return difficulty return 0
35.162602
98
0.536879
1,749
8,650
2.609491
0.096055
0.346626
0.50745
0.659947
0.307625
0.260736
0.256792
0.256792
0.231814
0.231814
0
0.009647
0.22104
8,650
245
99
35.306122
0.667706
0.03422
0
0.17561
0
0.17561
0.387015
0.032641
0
0
0
0
0
1
0.15122
false
0
0.034146
0.082927
0.356098
0.043902
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
21f3eb917a7fe5419c3bebc125b7b3d54ac6bcd3
633
py
Python
casepro/profiles/models.py
praekelt/helpdesk
69a7242679c30d2f7cb30a433809e738b9756a3c
[ "BSD-3-Clause" ]
null
null
null
casepro/profiles/models.py
praekelt/helpdesk
69a7242679c30d2f7cb30a433809e738b9756a3c
[ "BSD-3-Clause" ]
null
null
null
casepro/profiles/models.py
praekelt/helpdesk
69a7242679c30d2f7cb30a433809e738b9756a3c
[ "BSD-3-Clause" ]
null
null
null
from __future__ import absolute_import, unicode_literals from django.contrib.auth.models import User from django.db import models from django.utils.translation import ugettext_lazy as _ from casepro.cases.models import Partner class Profile(models.Model): """ Extension for the user class """ user = models.OneToOneField(User) partner = models.ForeignKey(Partner, null=True, related_name='user_profiles') full_name = models.CharField(verbose_name=_("Full name"), max_length=128, null=True) change_password = models.BooleanField(default=False, help_text=_("User must change password on next login"))
31.65
112
0.767773
84
633
5.583333
0.595238
0.063966
0
0
0
0
0
0
0
0
0
0.005525
0.14218
633
19
113
33.315789
0.858195
0.044234
0
0
0
0
0.103565
0
0
0
0
0
0
1
0
false
0.1
0.5
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
3
1d011029f0027ca57a60b4fdb462f2a827799b63
74
bzl
Python
src/main/kotlin/nl/tulipsolutions/keyderivation/deps.bzl
TulipSolutions/bitcoin-tools
1a78849d243d2e2834eb2c80276863ed5746f6c8
[ "Apache-2.0" ]
null
null
null
src/main/kotlin/nl/tulipsolutions/keyderivation/deps.bzl
TulipSolutions/bitcoin-tools
1a78849d243d2e2834eb2c80276863ed5746f6c8
[ "Apache-2.0" ]
null
null
null
src/main/kotlin/nl/tulipsolutions/keyderivation/deps.bzl
TulipSolutions/bitcoin-tools
1a78849d243d2e2834eb2c80276863ed5746f6c8
[ "Apache-2.0" ]
null
null
null
KEYDERIVATION_ARTIFACTS = [ "org.bouncycastle:bcprov-jdk15on:1.64", ]
18.5
43
0.72973
8
74
6.625
1
0
0
0
0
0
0
0
0
0
0
0.076923
0.121622
74
3
44
24.666667
0.738462
0
0
0
0
0
0.486486
0.486486
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
1d03a637d6ea0605fc1d459f9aef0c352e6eb042
737
py
Python
esl/simulation/entity.py
fagan2888/ESL
24ffa903e8c5b9e725eed9861623d4b6a4a205a2
[ "Apache-2.0" ]
1
2020-04-17T18:18:08.000Z
2020-04-17T18:18:08.000Z
esl/simulation/entity.py
fagan2888/ESL
24ffa903e8c5b9e725eed9861623d4b6a4a205a2
[ "Apache-2.0" ]
null
null
null
esl/simulation/entity.py
fagan2888/ESL
24ffa903e8c5b9e725eed9861623d4b6a4a205a2
[ "Apache-2.0" ]
null
null
null
from typing import List, TypeVar, Generic from esl.simulation.identity import Identity class Entity():#Generic[E]): __slots__ = ["identifier", "children"] identifier: Identity#[E] children: Identity.Digit#[E].Digit def __init__(self, identifier: Identity = Identity()): self.identifier = identifier self.children = 0 def __eq__(self, other: 'Entity'): return self.identifier.__eq__(other.identifier) def __ne__(self, other: 'Entity'): return self.identifier.__ne__(other.identifier) def __hash__(self): return self.identifier.__hash__() def __repr__(self): return f"Entity[E] {self.identifier}" def __str__(self): return self.__repr__()
26.321429
58
0.666214
83
737
5.385542
0.337349
0.187919
0.134228
0.09396
0.1566
0.1566
0
0
0
0
0
0.001724
0.213026
737
28
59
26.321429
0.768966
0.032564
0
0
0
0
0.080169
0
0
0
0
0
0
1
0.315789
false
0
0.105263
0.263158
0.894737
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
1d08fcec9ab16e44922b06fdb2f2a22af020eed8
496
py
Python
EXAMPLES/Generator [Yield]/return_example.py
nkpydev/Python-Learning
e571a6890f750aafde49d24d00cc57c52419b61a
[ "MIT" ]
null
null
null
EXAMPLES/Generator [Yield]/return_example.py
nkpydev/Python-Learning
e571a6890f750aafde49d24d00cc57c52419b61a
[ "MIT" ]
null
null
null
EXAMPLES/Generator [Yield]/return_example.py
nkpydev/Python-Learning
e571a6890f750aafde49d24d00cc57c52419b61a
[ "MIT" ]
null
null
null
#-----------------------------------------------------------------# #! Python3 # Author : NK # Month, Year : March, 2019 # Info : Program to get Squares of numbers upto 25, using return # Desc : An example program to show usage of return #-----------------------------------------------------------------# def nextSquare(x): return x*x def main(): for x in range(25): print(nextSquare(x)) if __name__ == '__main__': main()
29.176471
77
0.405242
47
496
4.106383
0.702128
0.093264
0
0
0
0
0
0
0
0
0
0.02439
0.256048
496
17
78
29.176471
0.498645
0.671371
0
0
0
0
0.051613
0
0
0
0
0
0
1
0.285714
false
0
0
0.142857
0.428571
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
3
0dff7e31184db53a05f8d08fac2443533fb9a275
175
py
Python
tests/roots/test-add_source_parser/source_parser.py
sephalon/sphinx
c892fe98f7a311dc51340f8d98e63d9790b7f820
[ "BSD-2-Clause" ]
1
2021-11-06T17:09:04.000Z
2021-11-06T17:09:04.000Z
tests/roots/test-add_source_parser/source_parser.py
sephalon/sphinx
c892fe98f7a311dc51340f8d98e63d9790b7f820
[ "BSD-2-Clause" ]
1
2017-07-15T22:46:50.000Z
2017-07-15T22:46:50.000Z
tests/roots/test-add_source_parser/source_parser.py
sephalon/sphinx
c892fe98f7a311dc51340f8d98e63d9790b7f820
[ "BSD-2-Clause" ]
1
2021-11-06T17:08:54.000Z
2021-11-06T17:08:54.000Z
# -*- coding: utf-8 -*- from docutils.parsers import Parser class TestSourceParser(Parser): pass def setup(app): app.add_source_parser('.test', TestSourceParser)
14.583333
52
0.702857
21
175
5.761905
0.809524
0
0
0
0
0
0
0
0
0
0
0.006849
0.165714
175
11
53
15.909091
0.821918
0.12
0
0
0
0
0.032895
0
0
0
0
0
0
1
0.2
false
0.2
0.2
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
3
df053021daeb4f423d824f9ece49d1dc9d864d19
1,171
py
Python
pygl/grid.py
andreevnick/robust-financial-portfolio-management-framework
9450a00c8d0e78a621afc08f29b17e20fbcb3592
[ "Apache-2.0", "MIT" ]
1
2021-10-30T14:06:59.000Z
2021-10-30T14:06:59.000Z
pygl/grid.py
doctormee/robust-fpm-cmc-msu-edu-2021
9450a00c8d0e78a621afc08f29b17e20fbcb3592
[ "Apache-2.0", "MIT" ]
5
2021-04-18T14:03:17.000Z
2021-05-15T10:42:05.000Z
pygl/grid.py
doctormee/robust-fpm-cmc-msu-edu-2021
9450a00c8d0e78a621afc08f29b17e20fbcb3592
[ "Apache-2.0", "MIT" ]
1
2021-04-18T14:03:43.000Z
2021-04-18T14:03:43.000Z
import numpy as np from .util import coalesce __all__ = ['Grid'] class Grid: def __init__(self, delta, logscale=False, center=None, dtype=None, dtype_p=None): self.dtype = coalesce(dtype, np.float64) self.dtype_p = coalesce(dtype_p, np.int64) self.delta = np.asarray(delta, dtype=self.dtype) self.logscale = logscale self.center = np.asarray(coalesce(center, self.delta*0), dtype=self.dtype) def x_trans(self, x): return x if self.logscale == False else np.log(x) def x_trans_inv(self, x): return x if self.logscale == False else np.exp(x) def get_projection(self, obj, **kwargs): # obj is an array of coordinates return self.get_point(obj) def get_point(self, x): x = self.x_trans(np.asarray(x, dtype=self.dtype)) return np.rint((x - self.center)/self.delta).astype(self.dtype_p) def map2x(self, point): return self.x_trans_inv(point * self.delta + self.center)
24.914894
85
0.550811
151
1,171
4.13245
0.298013
0.086538
0.067308
0.038462
0.11859
0.11859
0.11859
0.11859
0.11859
0.11859
0
0.007813
0.34415
1,171
47
86
24.914894
0.804688
0.025619
0
0
0
0
0.003509
0
0
0
0
0
0
1
0.285714
false
0
0.095238
0.190476
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
df208397d6f78277ea06be9554421d3136521236
5,926
py
Python
sdk/python/pulumi_aws_native/ec2/get_transit_gateway_multicast_domain.py
pulumi/pulumi-aws-native
1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3
[ "Apache-2.0" ]
29
2021-09-30T19:32:07.000Z
2022-03-22T21:06:08.000Z
sdk/python/pulumi_aws_native/ec2/get_transit_gateway_multicast_domain.py
pulumi/pulumi-aws-native
1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3
[ "Apache-2.0" ]
232
2021-09-30T19:26:26.000Z
2022-03-31T23:22:06.000Z
sdk/python/pulumi_aws_native/ec2/get_transit_gateway_multicast_domain.py
pulumi/pulumi-aws-native
1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3
[ "Apache-2.0" ]
4
2021-11-10T19:42:01.000Z
2022-02-05T10:15:49.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from . import outputs __all__ = [ 'GetTransitGatewayMulticastDomainResult', 'AwaitableGetTransitGatewayMulticastDomainResult', 'get_transit_gateway_multicast_domain', 'get_transit_gateway_multicast_domain_output', ] @pulumi.output_type class GetTransitGatewayMulticastDomainResult: def __init__(__self__, creation_time=None, options=None, state=None, tags=None, transit_gateway_multicast_domain_arn=None, transit_gateway_multicast_domain_id=None): if creation_time and not isinstance(creation_time, str): raise TypeError("Expected argument 'creation_time' to be a str") pulumi.set(__self__, "creation_time", creation_time) if options and not isinstance(options, dict): raise TypeError("Expected argument 'options' to be a dict") pulumi.set(__self__, "options", options) if state and not isinstance(state, str): raise TypeError("Expected argument 'state' to be a str") pulumi.set(__self__, "state", state) if tags and not isinstance(tags, list): raise TypeError("Expected argument 'tags' to be a list") pulumi.set(__self__, "tags", tags) if transit_gateway_multicast_domain_arn and not isinstance(transit_gateway_multicast_domain_arn, str): raise TypeError("Expected argument 'transit_gateway_multicast_domain_arn' to be a str") pulumi.set(__self__, "transit_gateway_multicast_domain_arn", transit_gateway_multicast_domain_arn) if transit_gateway_multicast_domain_id and not isinstance(transit_gateway_multicast_domain_id, str): raise TypeError("Expected argument 'transit_gateway_multicast_domain_id' to be a str") pulumi.set(__self__, "transit_gateway_multicast_domain_id", transit_gateway_multicast_domain_id) @property @pulumi.getter(name="creationTime") def creation_time(self) -> 
Optional[str]: """ The time the transit gateway multicast domain was created. """ return pulumi.get(self, "creation_time") @property @pulumi.getter def options(self) -> Optional['outputs.OptionsProperties']: """ The options for the transit gateway multicast domain. """ return pulumi.get(self, "options") @property @pulumi.getter def state(self) -> Optional[str]: """ The state of the transit gateway multicast domain. """ return pulumi.get(self, "state") @property @pulumi.getter def tags(self) -> Optional[Sequence['outputs.TransitGatewayMulticastDomainTag']]: """ The tags for the transit gateway multicast domain. """ return pulumi.get(self, "tags") @property @pulumi.getter(name="transitGatewayMulticastDomainArn") def transit_gateway_multicast_domain_arn(self) -> Optional[str]: """ The Amazon Resource Name (ARN) of the transit gateway multicast domain. """ return pulumi.get(self, "transit_gateway_multicast_domain_arn") @property @pulumi.getter(name="transitGatewayMulticastDomainId") def transit_gateway_multicast_domain_id(self) -> Optional[str]: """ The ID of the transit gateway multicast domain. """ return pulumi.get(self, "transit_gateway_multicast_domain_id") class AwaitableGetTransitGatewayMulticastDomainResult(GetTransitGatewayMulticastDomainResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetTransitGatewayMulticastDomainResult( creation_time=self.creation_time, options=self.options, state=self.state, tags=self.tags, transit_gateway_multicast_domain_arn=self.transit_gateway_multicast_domain_arn, transit_gateway_multicast_domain_id=self.transit_gateway_multicast_domain_id) def get_transit_gateway_multicast_domain(transit_gateway_multicast_domain_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetTransitGatewayMulticastDomainResult: """ The AWS::EC2::TransitGatewayMulticastDomain type :param str transit_gateway_multicast_domain_id: The ID of the transit gateway multicast domain. 
""" __args__ = dict() __args__['transitGatewayMulticastDomainId'] = transit_gateway_multicast_domain_id if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('aws-native:ec2:getTransitGatewayMulticastDomain', __args__, opts=opts, typ=GetTransitGatewayMulticastDomainResult).value return AwaitableGetTransitGatewayMulticastDomainResult( creation_time=__ret__.creation_time, options=__ret__.options, state=__ret__.state, tags=__ret__.tags, transit_gateway_multicast_domain_arn=__ret__.transit_gateway_multicast_domain_arn, transit_gateway_multicast_domain_id=__ret__.transit_gateway_multicast_domain_id) @_utilities.lift_output_func(get_transit_gateway_multicast_domain) def get_transit_gateway_multicast_domain_output(transit_gateway_multicast_domain_id: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetTransitGatewayMulticastDomainResult]: """ The AWS::EC2::TransitGatewayMulticastDomain type :param str transit_gateway_multicast_domain_id: The ID of the transit gateway multicast domain. """ ...
42.633094
169
0.716672
646
5,926
6.20743
0.176471
0.146633
0.240898
0.303741
0.490524
0.386284
0.292269
0.238404
0.237157
0.206234
0
0.000846
0.201822
5,926
138
170
42.942029
0.846934
0.139555
0
0.102273
1
0
0.178085
0.119333
0
0
0
0
0
1
0.113636
false
0
0.068182
0
0.295455
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
df208a63a9af272793b6ab835cfd826c19f14586
224
py
Python
While/While2.py
liyuanyuan11/Python
d94cc7ab39e56c6e24bfc741a30da77590d1d220
[ "MIT" ]
null
null
null
While/While2.py
liyuanyuan11/Python
d94cc7ab39e56c6e24bfc741a30da77590d1d220
[ "MIT" ]
null
null
null
While/While2.py
liyuanyuan11/Python
d94cc7ab39e56c6e24bfc741a30da77590d1d220
[ "MIT" ]
null
null
null
name=input("Please input my daughter's name:") while name!="Nina" and name!="Anime": print("I'm sorry, but the name is not valid.") name=input("Please input my daughter's name:") print("Yes."+name+"is my daughter.")
37.333333
50
0.674107
38
224
3.973684
0.526316
0.198676
0.198676
0.264901
0.463576
0.463576
0.463576
0.463576
0
0
0
0
0.147321
224
5
51
44.8
0.790576
0
0
0.4
0
0
0.575893
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
df235e8fe3be4e23a350204599e3a479a24f361d
801
py
Python
utils/middleware/globalrequestmiddleware.py
pwqbot/eoj3
46be6a6f192798e74eab7b327bb8df7ca73575d9
[ "MIT" ]
107
2017-03-15T11:53:45.000Z
2019-09-06T11:23:44.000Z
utils/middleware/globalrequestmiddleware.py
OS-EDU/eoj3
f117dcd4e3cea7d150c3e3794e7255e00d486c88
[ "MIT" ]
27
2019-09-24T12:44:48.000Z
2022-03-11T23:18:21.000Z
utils/middleware/globalrequestmiddleware.py
OS-EDU/eoj3
f117dcd4e3cea7d150c3e3794e7255e00d486c88
[ "MIT" ]
25
2019-10-11T10:39:12.000Z
2022-03-18T05:15:57.000Z
import threading class GlobalRequestMiddleware(object): _threadmap = {} def __init__(self, get_response): self.get_response = get_response def __call__(self, request): self.process_request(request) response = self.get_response(request) return self.process_response(request, response) @classmethod def get_current_request(cls): return cls._threadmap[threading.get_ident()] def process_request(self, request): self._threadmap[threading.get_ident()] = request def process_exception(self, request, exception): try: del self._threadmap[threading.get_ident()] except KeyError: pass def process_response(self, request, response): try: del self._threadmap[threading.get_ident()] except KeyError: pass return response
23.558824
52
0.722846
92
801
5.98913
0.271739
0.079855
0.15245
0.188748
0.250454
0.196007
0.196007
0.196007
0.196007
0.196007
0
0
0.184769
801
33
53
24.272727
0.843798
0
0
0.32
0
0
0
0
0
0
0
0
0
1
0.24
false
0.08
0.04
0.04
0.48
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
3
df486600be7faa70d817e9e4dbf08dfebc37cf4f
628
py
Python
apps/core/account/management/commands/set_default_db.py
GMNaim/Online-Exam-System
ed1e18cf0c0fa7f896bde261988982b49825e61f
[ "MIT" ]
null
null
null
apps/core/account/management/commands/set_default_db.py
GMNaim/Online-Exam-System
ed1e18cf0c0fa7f896bde261988982b49825e61f
[ "MIT" ]
null
null
null
apps/core/account/management/commands/set_default_db.py
GMNaim/Online-Exam-System
ed1e18cf0c0fa7f896bde261988982b49825e61f
[ "MIT" ]
null
null
null
from django.core.management.base import BaseCommand from django.core.management import call_command class Command(BaseCommand): help = 'Set all default models objects to setup default db.' def handle(self, *args, **kwargs): print('[+] Resource: ', end='') call_command('loaddata', 'import_sql/resource.json') print('[+] Permission: ', end='') call_command('loaddata', 'import_sql/permission.json') print('[+] Role: ', end='') call_command('loaddata', 'import_sql/role.json') print('[+] User: ', end='') call_command('loaddata', 'import_sql/user.json')
34.888889
64
0.63535
72
628
5.416667
0.458333
0.141026
0.14359
0.225641
0.317949
0.317949
0
0
0
0
0
0
0.199045
628
17
65
36.941176
0.775348
0
0
0
0
0
0.355662
0.079745
0
0
0
0
0
1
0.076923
false
0
0.461538
0
0.692308
0.307692
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
df66703db6fd7e4f83cfb9aadc3e08c228c6e3ef
2,022
py
Python
mongodbatlas/atlasresource.py
ozlevka-work/MongoDB-Atlas-API
781221653ea3956243c03670ab50a5308390a76f
[ "Apache-2.0" ]
null
null
null
mongodbatlas/atlasresource.py
ozlevka-work/MongoDB-Atlas-API
781221653ea3956243c03670ab50a5308390a76f
[ "Apache-2.0" ]
null
null
null
mongodbatlas/atlasresource.py
ozlevka-work/MongoDB-Atlas-API
781221653ea3956243c03670ab50a5308390a76f
[ "Apache-2.0" ]
null
null
null
import pprint
import random
import string
import json
from datetime import datetime

from dateutil import parser

from mongodbatlas.outputformat import OutputFormat


def json_datetime_encoder(item: datetime):
    """`json.dumps` `default=` hook: render datetime objects via str()."""
    return str(item)


class AtlasResource:
    """
    Base class for Atlas Resources
    """

    def __init__(self, resource: dict = None):
        """Wrap a raw resource dict; parses its 'created' field to datetime."""
        if resource:
            self._resource = resource
            if "created" in self._resource:
                # convert date string to datetime obj
                self._resource["created"] = parser.parse(self._resource["created"])
        else:
            self._resource = {}

    @property
    def timestamp(self):
        # NOTE(review): self._timestamp is never assigned anywhere in this
        # class, so this raises AttributeError unless a subclass sets it —
        # confirm the intended contract.
        return self._timestamp

    @property
    def id(self):
        return self._resource["id"]

    @property
    def name(self):
        return self._resource["name"]

    @name.setter
    def name(self, new_name):
        self._resource["name"] = new_name

    # The original defined the `resource` property twice; only this
    # getter/setter pair was effective, so the dead duplicate was removed.
    @property
    def resource(self):
        return self._resource

    @resource.setter
    def resource(self, item):
        self._resource = item

    @staticmethod
    def iter_print(iter, func, format):
        """Map each element of `iter` through `func` and print it in `format`."""
        for i in iter:
            func(i).print_resource(format)

    def pprint(self, fmt=OutputFormat.SUMMARY):
        """Print this resource as a summary, a Python literal, or JSON."""
        if fmt is OutputFormat.SUMMARY:
            print(self.summary_string())
        elif fmt is OutputFormat.PYTHON:
            pprint.pprint(self._resource, indent=2)
        elif fmt is OutputFormat.JSON:
            print(json.dumps(self._resource, indent=2, default=json_datetime_encoder))

    @staticmethod
    def random_name():
        """Return a random 10-character name of uppercase letters and digits."""
        return ''.join(random.choices(string.ascii_uppercase + string.digits, k=10))

    def __str__(self):
        return f"{pprint.pformat(self._resource)}"

    def __repr__(self):
        res = f"{pprint.pformat(self._resource, width=40)}"
        return f"{self.__class__.__name__}(resource={res})"
24.361446
86
0.638971
234
2,022
5.299145
0.303419
0.164516
0.067742
0.08871
0.108065
0.066129
0.066129
0
0
0
0
0.004011
0.260138
2,022
82
87
24.658537
0.824866
0.055885
0
0.192982
0
0
0.077208
0.054997
0
0
0
0
0
1
0.245614
false
0
0.122807
0.140351
0.54386
0.157895
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
df6eef552fa665a22aaeb2ff7fbb0a497b62dda4
245
wsgi
Python
web/zagg/zagg.wsgi
fahlmant/openshift-tools
dbb4f16ccde3404c36c23108c45ca7b67138ee12
[ "Apache-2.0" ]
164
2015-07-29T17:35:04.000Z
2021-12-16T16:38:04.000Z
web/zagg/zagg.wsgi
fahlmant/openshift-tools
dbb4f16ccde3404c36c23108c45ca7b67138ee12
[ "Apache-2.0" ]
3,634
2015-06-09T13:49:15.000Z
2022-03-23T20:55:44.000Z
web/zagg/zagg.wsgi
fahlmant/openshift-tools
dbb4f16ccde3404c36c23108c45ca7b67138ee12
[ "Apache-2.0" ]
250
2015-06-08T19:53:11.000Z
2022-03-01T04:51:23.000Z
# Force location of zagg-web handler into path
import sys

sys.path.insert(0, '/var/www/zagg')

# mod_wsgi looks for a module-level name `application`.
from zagg import flask_app as application

# Allow logging/errors to show up in Apache logs
import logging

logging.basicConfig(stream=sys.stderr)
27.222222
48
0.787755
41
245
4.682927
0.756098
0
0
0
0
0
0
0
0
0
0
0.004695
0.130612
245
8
49
30.625
0.896714
0.371429
0
0
0
0
0.086093
0
0
0
0
0
0
1
0
true
0
0.6
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
df70971d0fc42528b0d4cf44482d55e3bcdfb772
2,977
py
Python
setup.py
goetzk/python-nectarallocationclient
383165bdaa6e38a13b63398ae7099e94df68efcd
[ "Apache-2.0" ]
null
null
null
setup.py
goetzk/python-nectarallocationclient
383165bdaa6e38a13b63398ae7099e94df68efcd
[ "Apache-2.0" ]
null
null
null
setup.py
goetzk/python-nectarallocationclient
383165bdaa6e38a13b63398ae7099e94df68efcd
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup


def _read_requirements(path="requirements.txt"):
    """Return requirement specifiers from *path*, one per non-comment line.

    Replaces the old use of pip's private ``parse_requirements`` API:
    ``pip.req``/``pip._internal.req`` and the ``r.req`` attribute are
    internal and broke across pip releases (removed in pip >= 20).
    """
    with open(path) as fh:
        return [
            line.strip()
            for line in fh
            if line.strip() and not line.strip().startswith("#")
        ]


requirements = _read_requirements()

# OpenStack CLI plugin registration: one command string per subcommand.
entry_points = {
    'openstack.cli.extension': [
        'allocation = nectarallocationclient.osc.plugin',
    ],
    'openstack.allocation.v1': [
        'allocation list = nectarallocationclient.osc.v1.allocations:ListAllocations',
        'allocation show = nectarallocationclient.osc.v1.allocations:ShowAllocation',
        'allocation amend = nectarallocationclient.osc.v1.allocations:AmendAllocation',
        'allocation approve = nectarallocationclient.osc.v1.allocations:ApproveAllocation',
        'allocation history = nectarallocationclient.osc.v1.allocations:AllocationHistory',
        'allocation create = nectarallocationclient.osc.v1.allocations:CreateAllocation',
        'allocation set = nectarallocationclient.osc.v1.allocations:UpdateAllocation',
        'allocation zone list = nectarallocationclient.osc.v1.zones:ListZones',
        'allocation zone show = nectarallocationclient.osc.v1.zones:ShowZone',
        'allocation zone compute-homes = nectarallocationclient.osc.v1.zones:ListComputeHomes',
        'allocation service-type list = nectarallocationclient.osc.v1.service_types:ListServiceTypes',
        'allocation service-type show = nectarallocationclient.osc.v1.service_types:ShowServiceType',
        'allocation quota list = nectarallocationclient.osc.v1.quotas:ListQuotas',
        'allocation quota history = nectarallocationclient.osc.v1.quotas:QuotaHistory',
        'allocation resource list = nectarallocationclient.osc.v1.resources:ListResources',
        'allocation resource show = nectarallocationclient.osc.v1.resources:ShowResource',
        'allocation grant list = nectarallocationclient.osc.v1.grants:ListGrants',
        'allocation grant show = nectarallocationclient.osc.v1.grants:ShowGrant',
    ],
}

setup(
    name='nectarallocationclient',
    version='0.7.0',
    description=('Client for the Nectar Allocation system'),
    author='Sam Morrison',
    author_email='sorrison@gmail.com',
    url='https://github.com/NeCTAR-RC/python-nectarallocationclient',
    packages=[
        'nectarallocationclient',
    ],
    include_package_data=True,
    install_requires=requirements,
    license="Apache",
    zip_safe=False,
    # A list, not a tuple: setuptools deprecated tuple classifiers.
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Operating System :: OS Independent',
    ],
    entry_points=entry_points,
)
43.144928
102
0.715821
292
2,977
7.25
0.421233
0.224374
0.22957
0.12565
0.036845
0
0
0
0
0
0
0.014646
0.174337
2,977
68
103
43.779412
0.846623
0.024521
0
0.067797
0
0
0.668966
0.372759
0
0
0
0
0
1
0
false
0
0.101695
0
0.101695
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
10bfbb168ab5bde6d22553a17d54719ac2702563
263
py
Python
kolibri/core/tasks/exceptions.py
MBKayro/kolibri
0a38a5fb665503cf8f848b2f65938e73bfaa5989
[ "MIT" ]
545
2016-01-19T19:26:55.000Z
2022-03-20T00:13:04.000Z
kolibri/core/tasks/exceptions.py
MBKayro/kolibri
0a38a5fb665503cf8f848b2f65938e73bfaa5989
[ "MIT" ]
8,329
2016-01-19T19:32:02.000Z
2022-03-31T21:23:12.000Z
kolibri/core/tasks/exceptions.py
MBKayro/kolibri
0a38a5fb665503cf8f848b2f65938e73bfaa5989
[ "MIT" ]
493
2016-01-19T19:26:48.000Z
2022-03-28T14:35:05.000Z
from concurrent.futures import CancelledError


class UserCancelledError(CancelledError):
    """
    An error raised when the user cancels the current job.
    """


class JobNotFound(Exception):
    """Raised when a referenced job does not exist."""


class JobNotRestartable(Exception):
    """Raised when a job is in a state that cannot be restarted."""
14.611111
58
0.722433
27
263
7.037037
0.740741
0.094737
0
0
0
0
0
0
0
0
0
0
0.212928
263
17
59
15.470588
0.917874
0.205323
0
0.428571
0
0
0
0
0
0
0
0
0
1
0
true
0.428571
0.142857
0
0.571429
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
3
10ca30de440dbc442bea680fc768b9006be4f079
216
py
Python
cq/management/commands/cq_clear_logs.py
furious-luke/django-cq
9e7debc293cd4bfe2ef674e0e44f1c9a8a240df5
[ "BSD-3-Clause" ]
31
2016-09-16T05:32:43.000Z
2018-03-08T12:41:30.000Z
cq/management/commands/cq_clear_logs.py
furious-luke/django-cq
9e7debc293cd4bfe2ef674e0e44f1c9a8a240df5
[ "BSD-3-Clause" ]
13
2017-01-12T15:04:50.000Z
2017-12-05T04:41:27.000Z
cq/management/commands/cq_clear_logs.py
furious-luke/django-cq
9e7debc293cd4bfe2ef674e0e44f1c9a8a240df5
[ "BSD-3-Clause" ]
6
2016-09-16T05:32:54.000Z
2017-12-03T18:08:35.000Z
from django.core.management.base import BaseCommand

from cq.tasks import clear_logs


class Command(BaseCommand):
    """Management command that flushes all CQ logs from REDIS."""

    help = 'Clear all logs from REDIS.'

    def handle(self, *args, **options):
        # No arguments are used; delegate straight to the task helper.
        clear_logs()
21.6
51
0.712963
29
216
5.241379
0.724138
0.118421
0
0
0
0
0
0
0
0
0
0
0.189815
216
9
52
24
0.868571
0
0
0
0
0
0.12037
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
10eb411d0070e6fd494578fce087a082caeb0162
59
py
Python
pywakit/__init__.py
y-agg/pywakit
6eacaf6881418fc5a7c1ebd5c0d1bc0164f3c990
[ "MIT" ]
3
2020-09-04T07:45:04.000Z
2021-02-14T01:44:40.000Z
pywakit/__init__.py
y-agg/pywakit
6eacaf6881418fc5a7c1ebd5c0d1bc0164f3c990
[ "MIT" ]
null
null
null
pywakit/__init__.py
y-agg/pywakit
6eacaf6881418fc5a7c1ebd5c0d1bc0164f3c990
[ "MIT" ]
null
null
null
"""Package entry point: re-exports the WhatsApp client class."""
from pywakit.whatsapp import WhatsApp

__version__ = '0.0.6'
29.5
37
0.79661
9
59
4.777778
0.777778
0
0
0
0
0
0
0
0
0
0
0.056604
0.101695
59
2
38
29.5
0.754717
0
0
0
0
0
0.083333
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
8012640a619323c6019672e61062c7ef821e69cd
223
py
Python
optimizer.py
mullerpeter/hashcode2022
a2b36a34d27b001d4be6ee30f0aae9d0ace38ee6
[ "MIT" ]
null
null
null
optimizer.py
mullerpeter/hashcode2022
a2b36a34d27b001d4be6ee30f0aae9d0ace38ee6
[ "MIT" ]
null
null
null
optimizer.py
mullerpeter/hashcode2022
a2b36a34d27b001d4be6ee30f0aae9d0ace38ee6
[ "MIT" ]
null
null
null
from dataparser import Data
from tqdm import tqdm
import random


def run_optimisation(data: Data, num_steps=10):
    """Placeholder optimisation loop; currently only drives the progress bar."""
    progress = tqdm(range(num_steps))
    for _step in progress:
        # TODO: Implement Random Swaps
        continue
20.272727
47
0.699552
32
223
4.78125
0.65625
0.130719
0
0
0
0
0
0
0
0
0
0.011765
0.237668
223
10
48
22.3
0.888235
0.125561
0
0
0
0
0
0
0
0
0
0.1
0
1
0.142857
false
0
0.428571
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
1
0
0
3
8014cf829d480aa0017b71a091427dbd9664818d
1,450
py
Python
fluxcompensator/tests/test_pipeline.py
koepferl/FluxCompensator
751cac08971845069da8c962bc83459f091ba0f8
[ "BSD-2-Clause" ]
9
2017-06-22T15:29:01.000Z
2021-03-24T11:55:41.000Z
fluxcompensator/tests/test_pipeline.py
koepferl/FluxCompensator
751cac08971845069da8c962bc83459f091ba0f8
[ "BSD-2-Clause" ]
1
2020-06-16T21:01:51.000Z
2020-06-16T21:01:51.000Z
fluxcompensator/tests/test_pipeline.py
koepferl/FluxCompensator
751cac08971845069da8c962bc83459f091ba0f8
[ "BSD-2-Clause" ]
5
2017-06-22T14:57:24.000Z
2020-06-14T16:46:44.000Z
import pytest
import numpy as np
import os

from ..database.compact_pipeline import CompactPipeline, IRAC1, IRAC2, IRAC3, IRAC4, MIPS1, MIPS2, MIPS3, PACS1, PACS2, PACS3, SPIRE1, SPIRE2, SPIRE3, GLIMPSE_IRAC1, GLIMPSE_IRAC2, GLIMPSE_IRAC3, GLIMPSE_IRAC4, MIPSGAL_MIPS1, MIPSGAL_MIPS2, HIGAL_PACS1, HIGAL_PACS3, HIGAL_SPIRE1, HIGAL_SPIRE2, HIGAL_SPIRE3, WISESURVEY_WISE1, WISESURVEY_WISE2, WISESURVEY_WISE3, WISESURVEY_WISE4, TWOMASSSURVEY_J, TWOMASSSURVEY_H, TWOMASSSURVEY_K


def test_run():
    """Smoke test: a CompactPipeline can be assembled from a PSF and a filter."""
    from fluxcompensator.psf import GaussianPSF
    psf = GaussianPSF(diameter=350.)

    import fluxcompensator.database.missions as filters
    chosen_filter = getattr(filters, 'PACS1_FILTER')

    CompactPipeline(PSF_object=psf, PSF_resolution=3., filter_object=chosen_filter)


def test_run_test_filter():
    """Central wavelengths (waf_0) of the bundled pipelines match the catalog."""
    expected = [
        (IRAC1, 3.550), (IRAC2, 4.493), (IRAC3, 5.731), (IRAC4, 7.872),
        (MIPS1, 23.68), (MIPS2, 71.42), (MIPS3, 155.9),
        (PACS1, 70.), (PACS2, 100.), (PACS3, 160.),
        (SPIRE1, 250.), (SPIRE2, 350.), (SPIRE3, 500.),
    ]
    for pipeline, waf_0 in expected:
        assert pipeline.filter_object.waf_0 == waf_0
40.277778
430
0.753103
204
1,450
5.073529
0.357843
0.185507
0.188406
0.200966
0
0
0
0
0
0
0
0.085246
0.158621
1,450
35
431
41.428571
0.763115
0
0
0
0
0
0.008276
0
0
0
0
0
0.541667
1
0.083333
false
0
0.25
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
3
33879b0382df18d933b17711b9d4d5d2dd0f346f
145
py
Python
src/apiCall/urls.py
Akash16s/youtube-video-fetch-api
a662a089c33efd0f7454f7e34b4f291ffdc8a9ff
[ "Apache-2.0" ]
1
2021-11-14T10:30:16.000Z
2021-11-14T10:30:16.000Z
src/apiCall/urls.py
Akash16s/youtube-video-fetch-api
a662a089c33efd0f7454f7e34b4f291ffdc8a9ff
[ "Apache-2.0" ]
5
2021-03-30T13:22:16.000Z
2021-09-22T18:57:04.000Z
src/apiCall/urls.py
Akash16s/youtube-video-fetch-api
a662a089c33efd0f7454f7e34b4f291ffdc8a9ff
[ "Apache-2.0" ]
2
2020-09-04T04:35:35.000Z
2021-12-19T15:56:21.000Z
from django.urls import path

from .views import *

# URL table for the apiCall app.
urlpatterns = [
    # Fetches YouTube video details via the class-based view.
    path(
        "youtube/",
        getYoutubeDetails.as_view(),
        name="baseline Upload",
    ),
]
29
74
0.710345
17
145
6
0.823529
0
0
0
0
0
0
0
0
0
0
0
0.151724
145
5
75
29
0.829268
0
0
0
0
0
0.161972
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
33b974c613e670f81b154def7d425e812740f118
984
py
Python
amr2seq/span.py
masterkeywikz/seq2graph
745cb09f10c67a77c0ef517f5d58ac45f2ade09d
[ "MIT" ]
10
2017-02-25T17:26:15.000Z
2022-02-23T06:36:54.000Z
amr2seq/span.py
masterkeywikz/seq2graph
745cb09f10c67a77c0ef517f5d58ac45f2ade09d
[ "MIT" ]
null
null
null
amr2seq/span.py
masterkeywikz/seq2graph
745cb09f10c67a77c0ef517f5d58ac45f2ade09d
[ "MIT" ]
8
2016-12-22T00:36:33.000Z
2021-05-19T17:55:59.000Z
"""
sentence span mapping to special concepts in amr like name,date-entity,etc.
"""


class Span(object):
    """A token span [start, end] carrying its words and an entity tag."""

    def __init__(self, start, end, words, entity_tag):
        self.start = start
        self.end = end
        self.entity_tag = entity_tag
        self.words = words

    def set_entity_tag(self, entity_tag):
        """Replace this span's entity tag."""
        self.entity_tag = entity_tag

    def __str__(self):
        return '%s: start: %s, end: %s , tag:%s' % (
            self.__class__.__name__, self.start, self.end, self.entity_tag)

    # The original duplicated __str__'s format string verbatim in __repr__;
    # aliasing keeps the two representations from drifting apart.
    __repr__ = __str__

    def __eq__(self, other):
        # Deliberately compares only the covered range, not words/tag.
        return other.start == self.start and other.end == self.end

    def contains(self, other_span):
        """Return True if other_span lies strictly inside this span.

        A span does not contain an identical span (proper containment only).
        """
        return (other_span.start >= self.start
                and other_span.end <= self.end
                and not (other_span.start == self.start
                         and other_span.end == self.end))
30.75
145
0.628049
141
984
4.049645
0.255319
0.141856
0.113835
0.084063
0.556918
0.402802
0.402802
0.402802
0.402802
0.402802
0
0
0.247967
984
31
146
31.741935
0.771622
0.07622
0
0.210526
0
0
0.068889
0
0
0
0
0
0
1
0.315789
false
0
0
0.157895
0.631579
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
33bdde4618568109b24c6a025dc322a1c25518cb
39
py
Python
azure-kusto-ingest/azure/kusto/ingest/_version.py
yaniv-ms/azure-kusto-python
b1c323c3c5b264725cede98d6f9b3ad0eb0e993d
[ "MIT" ]
null
null
null
azure-kusto-ingest/azure/kusto/ingest/_version.py
yaniv-ms/azure-kusto-python
b1c323c3c5b264725cede98d6f9b3ad0eb0e993d
[ "MIT" ]
null
null
null
azure-kusto-ingest/azure/kusto/ingest/_version.py
yaniv-ms/azure-kusto-python
b1c323c3c5b264725cede98d6f9b3ad0eb0e993d
[ "MIT" ]
null
null
null
"""Version file."""

# Package version, consumed by setup metadata.
VERSION = "0.0.21"
13
19
0.564103
6
39
3.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0.117647
0.128205
39
2
20
19.5
0.529412
0.333333
0
0
0
0
0.3
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
1d1c12689f11c5a9358989b6bd9fa886df8f2d9d
498
py
Python
test/unit/tool_util/mulled/test_mulled_build.py
corburn/galaxy
850d483bf260cba969ddb21de2f5404c15e09d1c
[ "CC-BY-3.0" ]
2
2017-03-28T12:11:41.000Z
2017-04-22T02:58:25.000Z
test/unit/tool_util/mulled/test_mulled_build.py
bioinfo1992/galaxy
46690eb76f3a1987ea2ac5ff0d3386177cf2154f
[ "CC-BY-3.0" ]
1
2019-08-19T15:24:17.000Z
2019-08-19T15:24:17.000Z
test/unit/tool_util/mulled/test_mulled_build.py
bioinfo1992/galaxy
46690eb76f3a1987ea2ac5ff0d3386177cf2154f
[ "CC-BY-3.0" ]
null
null
null
import pytest

from galaxy.tool_util.deps.mulled.mulled_build import (
    any_target_requires_extended_base,
    build_target,
)

from ..util import external_dependency_management


@pytest.mark.parametrize("target,requires_extended", [
    ('maker', True),
    ('samtools', False),
])
@external_dependency_management
def test_any_target_requires_extended_base(target, requires_extended):
    """Each package should report the expected need for an extended base image."""
    built = build_target(target)
    assert any_target_requires_extended_base([built]) == requires_extended
27.666667
75
0.791165
60
498
6.166667
0.433333
0.227027
0.356757
0.202703
0.354054
0.275676
0.275676
0.275676
0
0
0
0
0.116466
498
17
76
29.294118
0.840909
0
0
0
0
0
0.074297
0.048193
0
0
0
0
0.071429
1
0.071429
false
0
0.214286
0
0.285714
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
1d22201a83a2bbf1c25941488f2a1acec09ad837
2,237
py
Python
output/models/nist_data/list_pkg/unsigned_byte/schema_instance/nistschema_sv_iv_list_unsigned_byte_enumeration_2_xsd/nistschema_sv_iv_list_unsigned_byte_enumeration_2.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
1
2021-08-14T17:59:21.000Z
2021-08-14T17:59:21.000Z
output/models/nist_data/list_pkg/unsigned_byte/schema_instance/nistschema_sv_iv_list_unsigned_byte_enumeration_2_xsd/nistschema_sv_iv_list_unsigned_byte_enumeration_2.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
4
2020-02-12T21:30:44.000Z
2020-04-15T20:06:46.000Z
output/models/nist_data/list_pkg/unsigned_byte/schema_instance/nistschema_sv_iv_list_unsigned_byte_enumeration_2_xsd/nistschema_sv_iv_list_unsigned_byte_enumeration_2.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
null
null
null
# Auto-generated xsdata model for the NISTSchema unsignedByte list
# enumeration; values mirror the schema's enumeration facets.
from dataclasses import dataclass, field
from enum import Enum
from typing import Optional

# XML target namespace the model was generated from.
__NAMESPACE__ = "NISTSchema-SV-IV-list-unsignedByte-enumeration-2-NS"


class NistschemaSvIvListUnsignedByteEnumeration2Type(Enum):
    # Each member is a tuple of unsignedByte values — one tuple element per
    # xs:list item of the corresponding enumerated value.
    VALUE_255_40_95_70_30_41 = (255, 40, 95, 70, 30, 41,)
    VALUE_2_3_1_52_84_98 = (2, 3, 1, 52, 84, 98,)
    VALUE_68_3_255_29_7_48_28_66 = (68, 3, 255, 29, 7, 48, 28, 66,)
    VALUE_14_28_184_73_42_255_97_80_17 = (14, 28, 184, 73, 42, 255, 97, 80, 17,)
    VALUE_13_79_255_40_6_95_197_48 = (13, 79, 255, 40, 6, 95, 197, 48,)
    VALUE_255_8_3_59_49_62 = (255, 8, 3, 59, 49, 62,)
    VALUE_134_71_5_52_8_255_18_24 = (134, 71, 5, 52, 8, 255, 18, 24,)
    VALUE_10_255_3_59_47_19_126_45 = (10, 255, 3, 59, 47, 19, 126, 45,)
    VALUE_255_3_87_4_82_248 = (255, 3, 87, 4, 82, 248,)
    VALUE_10_3_255_81_140_92_22_83 = (10, 3, 255, 81, 140, 92, 22, 83,)


@dataclass
class NistschemaSvIvListUnsignedByteEnumeration2:
    # Root element binding: holds one of the enumerated value tuples above.
    class Meta:
        # XML element name and namespace, as declared in the source schema.
        name = "NISTSchema-SV-IV-list-unsignedByte-enumeration-2"
        namespace = "NISTSchema-SV-IV-list-unsignedByte-enumeration-2-NS"

    value: Optional[NistschemaSvIvListUnsignedByteEnumeration2Type] = field(
        default=None,
        metadata={
            "required": True,
        }
    )
19.284483
76
0.386679
212
2,237
3.716981
0.372642
0.025381
0.053299
0.068528
0.560914
0.560914
0.446701
0.393401
0.185279
0
0
0.294406
0.544479
2,237
115
77
19.452174
0.478901
0
0
0.284404
0
0
0.07063
0.067054
0
0
0
0
0
1
0
false
0
0.027523
0
0.155963
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
1d265bebaac2c7621f87df69a587e32acddb1001
735
py
Python
solidity/python/FormulaNativePython/__init__.py
samuelpilcer/contracts
9e2857ea984fc07b3ef5a2bac8e64554c06b5c0e
[ "Apache-2.0" ]
null
null
null
solidity/python/FormulaNativePython/__init__.py
samuelpilcer/contracts
9e2857ea984fc07b3ef5a2bac8e64554c06b5c0e
[ "Apache-2.0" ]
null
null
null
solidity/python/FormulaNativePython/__init__.py
samuelpilcer/contracts
9e2857ea984fc07b3ef5a2bac8e64554c06b5c0e
[ "Apache-2.0" ]
1
2018-04-03T12:39:19.000Z
2018-04-03T12:39:19.000Z
from decimal import Decimal
from decimal import getcontext

# 78 digits for a maximum of 2^256-1, and 2 more digits for after the decimal point
getcontext().prec = 80


def calculatePurchaseReturn(supply, balance, weight, amount):
    """Tokens minted when `amount` is deposited into a reserve of `balance`."""
    ratio = Decimal(weight) / 1000000
    growth = 1 + Decimal(amount) / Decimal(balance)
    return Decimal(supply) * (growth ** ratio - 1)


def calculateSaleReturn(supply, balance, weight, amount):
    """Reserve tokens released when `amount` of supply is sold back."""
    inverse_ratio = 1000000 / Decimal(weight)
    remaining = 1 - Decimal(amount) / Decimal(supply)
    return Decimal(balance) * (1 - remaining ** inverse_ratio)


def power(baseN, baseD, expN, expD, precision):
    """(baseN/baseD) ** (expN/expD), scaled up by 2**precision."""
    base = Decimal(baseN) / Decimal(baseD)
    exponent = Decimal(expN) / Decimal(expD)
    return base ** exponent * 2 ** precision


def ln(numerator, denominator, precision):
    """Natural log of numerator/denominator, scaled up by 2**precision."""
    ratio = Decimal(numerator) / Decimal(denominator)
    return ratio.ln() * 2 ** precision
33.409091
106
0.744218
96
735
5.697917
0.395833
0.095064
0.062157
0.091408
0.13894
0.13894
0
0
0
0
0
0.045942
0.111565
735
21
107
35
0.79173
0.110204
0
0
0
0
0
0
0
0
0
0
0
1
0.363636
false
0
0.181818
0.363636
0.909091
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
1d41d63637517a0d2ddc3e749959ee2bc22da517
1,162
py
Python
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/WGL/ARB/create_context.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/WGL/ARB/create_context.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/raw/WGL/ARB/create_context.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.WGL import _types as _cs
# End users want this...
from OpenGL.raw.WGL._types import *
from OpenGL.raw.WGL import _errors
from OpenGL.constant import Constant as _C

import ctypes

# Registry name of the extension these bindings expose.
_EXTENSION_NAME = 'WGL_ARB_create_context'


def _f(function):
    # Wrap `function` as a WGL entry point for this extension, with the
    # standard PyOpenGL error checker attached.
    return _p.createFunction(function, _p.PLATFORM.WGL, 'WGL_ARB_create_context', error_checker=_errors._error_checker)


# Enumerant constants defined by the extension (name, numeric value).
ERROR_INVALID_VERSION_ARB = _C('ERROR_INVALID_VERSION_ARB', 0x2095)
WGL_CONTEXT_DEBUG_BIT_ARB = _C('WGL_CONTEXT_DEBUG_BIT_ARB', 0x00000001)
WGL_CONTEXT_FLAGS_ARB = _C('WGL_CONTEXT_FLAGS_ARB', 0x2094)
WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB = _C('WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB', 0x00000002)
WGL_CONTEXT_LAYER_PLANE_ARB = _C('WGL_CONTEXT_LAYER_PLANE_ARB', 0x2093)
WGL_CONTEXT_MAJOR_VERSION_ARB = _C('WGL_CONTEXT_MAJOR_VERSION_ARB', 0x2091)
WGL_CONTEXT_MINOR_VERSION_ARB = _C('WGL_CONTEXT_MINOR_VERSION_ARB', 0x2092)


# wglCreateContextAttribsARB(HDC, HGLRC, const int*) -> HGLRC
@_f
@_p.types(_cs.HGLRC, _cs.HDC, _cs.HGLRC, ctypes.POINTER(_cs.c_int))
def wglCreateContextAttribsARB(hDC, hShareContext, attribList): pass
48.416667
118
0.829604
179
1,162
4.882682
0.379888
0.1373
0.048055
0.09611
0.372998
0.075515
0
0
0
0
0
0.0403
0.081756
1,162
23
119
50.521739
0.778819
0.086059
0
0
1
0
0.23062
0.23062
0
0
0.04845
0
0
1
0.105263
false
0.052632
0.315789
0.052632
0.473684
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
3
1d8643819b6fd2cc7279940e0eb8e114425632a1
465
py
Python
mayan/apps/ocr/runtime.py
Syunkolee9891/Mayan-EDMS
3759a9503a264a180b74cc8518388f15ca66ac1a
[ "Apache-2.0" ]
1
2021-06-17T18:24:25.000Z
2021-06-17T18:24:25.000Z
mayan/apps/ocr/runtime.py
Syunkolee9891/Mayan-EDMS
3759a9503a264a180b74cc8518388f15ca66ac1a
[ "Apache-2.0" ]
7
2020-06-06T00:01:04.000Z
2022-01-13T01:47:17.000Z
mayan/apps/ocr/runtime.py
Syunkolee9891/Mayan-EDMS
3759a9503a264a180b74cc8518388f15ca66ac1a
[ "Apache-2.0" ]
null
null
null
from __future__ import unicode_literals

import yaml

try:
    # Prefer the C-accelerated loader when libyaml is available.
    from yaml import CSafeLoader as SafeLoader
except ImportError:
    from yaml import SafeLoader

from django.utils.module_loading import import_string

from .settings import setting_ocr_backend, setting_ocr_backend_arguments

# Resolve the configured backend class, parse its YAML-encoded keyword
# arguments (defaulting to an empty mapping), and instantiate it once at
# import time.
backend_class = import_string(dotted_path=setting_ocr_backend.value)
backend_kwargs = yaml.load(
    stream=setting_ocr_backend_arguments.value or '{}',
    Loader=SafeLoader
)
ocr_backend = backend_class(**backend_kwargs)
22.142857
77
0.787097
59
465
5.864407
0.491525
0.144509
0.196532
0.150289
0
0
0
0
0
0
0
0
0.156989
465
20
78
23.25
0.882653
0
0
0
0
0
0.004301
0
0
0
0
0
0
1
0
false
0
0.533333
0
0.533333
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
d5232e97d9442219eb60dd41c97e8ec769d65418
323
py
Python
python/run-report.py
nblair/team-stats
3d832aac1804a41f2277a05e5cec1397f5c8003d
[ "Apache-2.0" ]
null
null
null
python/run-report.py
nblair/team-stats
3d832aac1804a41f2277a05e5cec1397f5c8003d
[ "Apache-2.0" ]
1
2021-06-01T23:00:01.000Z
2021-06-01T23:00:01.000Z
python/run-report.py
nblair/team-stats
3d832aac1804a41f2277a05e5cec1397f5c8003d
[ "Apache-2.0" ]
1
2018-10-18T15:51:17.000Z
2018-10-18T15:51:17.000Z
#!/usr/local/bin/python3
from jira_report import print_jira_info
from github_report import get_pull_request_information
from util import get_settings

# Load shared configuration once, then run both report sections.
settings = get_settings()

print("---jira info---")
issueKeys = print_jira_info(settings)

print()
print('---pr info---')
# The PR report is keyed off the issues found in the Jira section.
get_pull_request_information(settings, issueKeys)
23.071429
54
0.795666
45
323
5.4
0.422222
0.111111
0.160494
0.205761
0
0
0
0
0
0
0
0.003401
0.089783
323
14
55
23.071429
0.823129
0.071207
0
0
0
0
0.093333
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.555556
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
3
d52ab206e67e2796a6045d32ec7a361d591275c5
1,088
py
Python
info2021/aplicaciones/Quiz/migrations/0007_auto_20210901_0704.py
Nemo3003/Proyecto_Final_G_8
65bc0b74e5746801b373b8714fc6574f2de21f91
[ "Apache-2.0" ]
null
null
null
info2021/aplicaciones/Quiz/migrations/0007_auto_20210901_0704.py
Nemo3003/Proyecto_Final_G_8
65bc0b74e5746801b373b8714fc6574f2de21f91
[ "Apache-2.0" ]
null
null
null
info2021/aplicaciones/Quiz/migrations/0007_auto_20210901_0704.py
Nemo3003/Proyecto_Final_G_8
65bc0b74e5746801b373b8714fc6574f2de21f91
[ "Apache-2.0" ]
1
2021-09-05T23:42:46.000Z
2021-09-05T23:42:46.000Z
# Generated by Django 3.0.5 on 2021-09-01 10:04 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('Quiz', '0006_auto_20210901_0043'), ] operations = [ migrations.AlterField( model_name='elegirrespuesta', name='id', field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), ), migrations.AlterField( model_name='pregunta', name='id', field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), ), migrations.AlterField( model_name='preguntasrespondidas', name='id', field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), ), migrations.AlterField( model_name='quizusuario', name='id', field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), ), ]
32
108
0.610294
113
1,088
5.707965
0.380531
0.074419
0.155039
0.179845
0.643411
0.643411
0.643411
0.643411
0.643411
0.643411
0
0.039043
0.270221
1,088
33
109
32.969697
0.7733
0.04136
0
0.592593
1
0
0.09318
0.022094
0
0
0
0
0
1
0
false
0
0.037037
0
0.148148
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
d52df23dea66802565a0233d5e9eb68bf0c5278f
1,785
py
Python
jobya/blog/managers.py
xblzbjs/Jobya
b936ce37da86bfe8326a532dab3887fae6c65e45
[ "MIT" ]
null
null
null
jobya/blog/managers.py
xblzbjs/Jobya
b936ce37da86bfe8326a532dab3887fae6c65e45
[ "MIT" ]
2
2022-02-08T01:15:52.000Z
2022-03-31T04:24:15.000Z
jobya/blog/managers.py
xblzbjs/Jobya
b936ce37da86bfe8326a532dab3887fae6c65e45
[ "MIT" ]
null
null
null
from django.db import models
from django.db.models.aggregates import Count
from django.db.models.query import QuerySet


class CategoryQuerySet(QuerySet):
    """Custom CategoryQuerySet for blog category"""

    def annotate_num_posts(self) -> QuerySet:
        """Category names paired with their published-post counts."""
        with_published = self.filter(posts__status="published")
        counted = with_published.annotate(num_posts=Count("posts"))
        return counted.values("name", "num_posts")


class CategoryManager(models.Manager):
    """Custom Category Manager"""

    def get_queryset(self):
        return CategoryQuerySet(self.model, using=self._db)

    def annotate_num_posts(self):
        return self.get_queryset().annotate_num_posts()


class PostQuerySet(QuerySet):
    """Custom PostQuerySet for blog post"""

    def get_published(self):
        """Published posts, earliest publication date first."""
        return self.filter(status="published").all().order_by("published_date")

    def get_draft(self):
        """Draft posts ordered by modification time."""
        return self.filter(status="draft").all().order_by("modified")

    def get_by_category(self, category_name):
        """Posts whose category has the given name."""
        return self.filter(category__name=category_name)

    def get_recent_posts(self, num=5, published=True):
        """First `num` published posts, or drafts when published=False."""
        source = self.get_published() if published else self.get_draft()
        return source[:num]


class PostManager(models.Manager):
    """Custom Post Manager"""

    def get_queryset(self):
        return PostQuerySet(self.model, using=self._db)

    def get_by_category(self, category_name):
        return self.get_queryset().get_by_category(category_name)

    def get_published(self):
        return self.get_queryset().get_published()

    def get_draft(self):
        return self.get_queryset().get_draft()

    def get_recent_posts(self, num=5, published=True):
        return self.get_queryset().get_recent_posts(num, published)
28.333333
79
0.682353
221
1,785
5.294118
0.20362
0.094017
0.077778
0.089744
0.450427
0.342735
0.136752
0.136752
0.136752
0
0
0.001404
0.202241
1,785
62
80
28.790323
0.820225
0.066667
0
0.263158
0
0
0.038298
0
0
0
0
0
0
1
0.315789
false
0
0.078947
0.289474
0.842105
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
d5396614672b74736003f2940ab104ec14c386a2
490
py
Python
src/credentialView.py
danslobodan/LibraryConsole
2ce35ddf17b960989a22940e69eadd69f936477a
[ "MIT" ]
null
null
null
src/credentialView.py
danslobodan/LibraryConsole
2ce35ddf17b960989a22940e69eadd69f936477a
[ "MIT" ]
null
null
null
src/credentialView.py
danslobodan/LibraryConsole
2ce35ddf17b960989a22940e69eadd69f936477a
[ "MIT" ]
1
2019-01-31T18:55:59.000Z
2019-01-31T18:55:59.000Z
import view


def getUsername(credentials):
    """Prompt for a username; return "" when it is already taken."""
    username = view.assertInput("Username")
    if username in credentials:
        print("Username", username, "is already taken.")
        return ""
    return username


def getCredentials(credentials):
    """Prompt until an unused username is entered, then collect a password."""
    username = getUsername(credentials)
    while not username:
        # "" signals a collision — ask again.
        username = getUsername(credentials)
    password = view.assertInput("Password")
    return {
        "username": username,
        "password": password,
    }
21.304348
56
0.640816
42
490
7.47619
0.404762
0.210191
0.191083
0
0
0
0
0
0
0
0
0
0.257143
490
22
57
22.272727
0.862637
0
0
0.125
0
0
0.116327
0
0
0
0
0
0.125
1
0.125
false
0.125
0.0625
0
0.375
0.0625
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
d53e70627944200373675908f8c14e9ce678b18e
69
py
Python
tests/__init__.py
arkottke/pybline
6174047e94573d058c5af9556ed8b1dc394d758e
[ "MIT" ]
null
null
null
tests/__init__.py
arkottke/pybline
6174047e94573d058c5af9556ed8b1dc394d758e
[ "MIT" ]
null
null
null
tests/__init__.py
arkottke/pybline
6174047e94573d058c5af9556ed8b1dc394d758e
[ "MIT" ]
1
2021-02-25T14:05:37.000Z
2021-02-25T14:05:37.000Z
import pathlib

# Directory holding the test fixture data, located next to this file.
FPATH_DATA = pathlib.Path(__file__).parent / 'data'
13.8
51
0.753623
9
69
5.222222
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.130435
69
4
52
17.25
0.783333
0
0
0
0
0
0.057971
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
d5574f0cb29de333d5d417104903143f8f0cc93c
137
py
Python
codigos/Cap02/ex2.12.py
skunkworksdev/Ifes_Algoritmo
458ef73a304573c12b45d8afae38e13ae0f3354f
[ "MIT" ]
null
null
null
codigos/Cap02/ex2.12.py
skunkworksdev/Ifes_Algoritmo
458ef73a304573c12b45d8afae38e13ae0f3354f
[ "MIT" ]
null
null
null
codigos/Cap02/ex2.12.py
skunkworksdev/Ifes_Algoritmo
458ef73a304573c12b45d8afae38e13ae0f3354f
[ "MIT" ]
null
null
null
# Temperatures to report.
graus = [0, 10, 20, 40, 100]

for temperatura in graus:
    print("A temperatura é: ", temperatura)

print("a Lista de temperaturas tem ", len(graus), 'elementos')
19.571429
62
0.656934
24
137
3.75
0.791667
0.133333
0
0
0
0
0
0
0
0
0
0.087719
0.167883
137
6
63
22.833333
0.701754
0
0
0
0
0
0.394161
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
d55ef2d5d1d8ee68639ab6058c2ea5746cbf287f
80
py
Python
assignements/S4_queues_tools.py
Just1bridou/BachelorDIM-Lectures-Algorithms-2020
a2f23a3afff637bfa064cb83b3f18cecd069cb52
[ "MIT" ]
null
null
null
assignements/S4_queues_tools.py
Just1bridou/BachelorDIM-Lectures-Algorithms-2020
a2f23a3afff637bfa064cb83b3f18cecd069cb52
[ "MIT" ]
null
null
null
assignements/S4_queues_tools.py
Just1bridou/BachelorDIM-Lectures-Algorithms-2020
a2f23a3afff637bfa064cb83b3f18cecd069cb52
[ "MIT" ]
null
null
null
from keys import Keys key = Keys() AMQP_url = key.getAMQP_URL() print(AMQP_url)
16
28
0.75
14
80
4.071429
0.571429
0.245614
0
0
0
0
0
0
0
0
0
0
0.1375
80
5
29
16
0.826087
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.25
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
d5639f6316b2e6fe91a2dec0ab2844db72c24cb3
937
py
Python
django_falcon/system/models.py
Lukejo2/python-django-falcon
12ac52b6e241bc29a41cf4b6df66e7e2f85b1dd2
[ "MIT" ]
null
null
null
django_falcon/system/models.py
Lukejo2/python-django-falcon
12ac52b6e241bc29a41cf4b6df66e7e2f85b1dd2
[ "MIT" ]
null
null
null
django_falcon/system/models.py
Lukejo2/python-django-falcon
12ac52b6e241bc29a41cf4b6df66e7e2f85b1dd2
[ "MIT" ]
null
null
null
from django.db import models from django.utils import timezone from rest_framework import serializers # Create your models here. class SensorEvent(models.Model): name = models.CharField(max_length=128) value = models.FloatField(null=True, blank=True) unit = models.CharField(max_length=16) status = models.CharField(max_length=16) lower_non_recoverable = models.FloatField(null=True, blank=True) lower_critical = models.FloatField(null=True, blank=True) lower_non_critical = models.FloatField(null=True, blank=True) upper_non_critical = models.FloatField(null=True, blank=True) upper_critical = models.FloatField(null=True, blank=True) upper_non_recoverable = models.FloatField(null=True, blank=True) timestamp = models.DateTimeField(default=timezone.now) class SensorEventSerializer(serializers.ModelSerializer): class Meta: model = SensorEvent fields = '__all__'
32.310345
68
0.75667
116
937
5.956897
0.37069
0.162084
0.202605
0.243126
0.541245
0.465991
0.418234
0.344428
0.212735
0
0
0.008794
0.15048
937
28
69
33.464286
0.859296
0.025614
0
0
0
0
0.007684
0
0
0
0
0
0
1
0
false
0
0.157895
0
0.894737
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
6352e3854520776cae288b1173b6c51a0774f089
160
py
Python
tutorials/assets/lrp_model/__init__.py
Christophe-Jia/InterpretDL
5736cb880d3c9bd79241d2ea6cb0490d9e8b089d
[ "Apache-2.0" ]
107
2020-07-02T14:25:01.000Z
2022-03-31T18:49:01.000Z
tutorials/assets/lrp_model/__init__.py
Christophe-Jia/InterpretDL
5736cb880d3c9bd79241d2ea6cb0490d9e8b089d
[ "Apache-2.0" ]
22
2020-07-28T01:57:21.000Z
2022-03-31T07:51:36.000Z
tutorials/assets/lrp_model/__init__.py
Christophe-Jia/InterpretDL
5736cb880d3c9bd79241d2ea6cb0490d9e8b089d
[ "Apache-2.0" ]
23
2020-07-10T05:08:39.000Z
2022-03-31T10:00:04.000Z
from .resnet_lrp import * from .vgg_lrp import * __all__ = [ 'ResNet', 'resnet18', 'resnet32', 'resnet50', 'resnet101', 'resnet152', 'VGG', 'vgg16' ]
17.777778
75
0.625
17
160
5.529412
0.705882
0.191489
0
0
0
0
0
0
0
0
0
0.107692
0.1875
160
8
76
20
0.615385
0
0
0
0
0
0.35
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
635444b1b5650e3c31fbd780002ce0e3df71654e
1,009
py
Python
student_lab2/tests/q1_2.py
ds-modules/DATA-198-SP21
eedff5e04a5fcb50032ff9f746b164bf8658030f
[ "BSD-3-Clause" ]
null
null
null
student_lab2/tests/q1_2.py
ds-modules/DATA-198-SP21
eedff5e04a5fcb50032ff9f746b164bf8658030f
[ "BSD-3-Clause" ]
1
2021-03-29T16:48:19.000Z
2021-03-29T16:48:19.000Z
student_lab2/tests/q1_2.py
ds-modules/DATA-198-SP21
eedff5e04a5fcb50032ff9f746b164bf8658030f
[ "BSD-3-Clause" ]
null
null
null
test = { 'name': 'q1_2', 'points': 1, 'suites': [ { 'cases': [ {'code': ">>> assert trending_vids.shape[0] == '40379'\n", 'hidden': False, 'locked': False}, {'code': ">>> assert trending_vids.iloc[0, 0] == '25231'\n", 'hidden': False, 'locked': False}, {'code': ">>> assert trending_vids.iloc[0, 4] == 'Inside Edition'\n", 'hidden': False, 'locked': False}, {'code': ">>> assert trending_vids.loc[0, 'views'] == 542677.0\n", 'hidden': False, 'locked': False}, {'code': ">>> assert trending_vids.loc[2, 'likes'] == 11390.0\n", 'hidden': False, 'locked': False}, {'code': ">>> assert trending_vids.loc[3, 'dislikes'] == 175.0\n", 'hidden': False, 'locked': False}], 'scored': True, 'setup': '', 'teardown': '', 'type': 'doctest'}]}
77.615385
139
0.424182
95
1,009
4.431579
0.4
0.142518
0.256532
0.313539
0.64133
0.64133
0.584323
0.584323
0.584323
0.584323
0
0.059006
0.361744
1,009
12
140
84.083333
0.594721
0
0
0
0
0
0.45887
0.063429
0
0
0
0
0.5
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
3
6361a28b771de3507c3f04ed351dccd973635336
1,819
py
Python
quorra/models.py
bprinty/quorra-python
82b6583992c6db4f16ee0d74ea7b1fe5e84cf03c
[ "Apache-2.0" ]
null
null
null
quorra/models.py
bprinty/quorra-python
82b6583992c6db4f16ee0d74ea7b1fe5e84cf03c
[ "Apache-2.0" ]
null
null
null
quorra/models.py
bprinty/quorra-python
82b6583992c6db4f16ee0d74ea7b1fe5e84cf03c
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # # Methods for generating quorra plot models. # # @author <bprinty@gmail.com> # ------------------------------------------------ # imports # ------- import json import pandas # models # ------ def line(): return Plot('line') def density(): return Plot('density') def scatter(): return Plot('scatter') def bar(): return Plot('bar') def histogram(): return Plot('histogram') def pie(): return Plot('pie') def multiline(): return Plot('multiline') # objects # ------- class Plot(object): def __init__(self, model): self.model = model self.attr = {} return def __getattr__(self, item): def func(value, key=item): self.attr[key] = value return self return func def __str__(self): ret = 'quorra.{}()'.format(self.model) for key in self.attr: ret += '\n.{}({})'.format(key, json.dumps(self.attr[key])) return ret def data(self, data, x=None, y=None, group=None, label=None): if isinstance(data, pandas.DataFrame): self.attr['data'] = [] for idx in range(0, len(data)): entry = {} if x is not None: entry['x'] = data[x][idx] if y is not None: entry['y'] = data[y][idx] if group is not None: entry['group'] = data[group][idx] if label is not None: entry['label'] = data[label][idx] self.attr['data'].append(entry) elif isinstance(data, list, tuple): self.attr['data'] = data else: raise AssertionError('Input data must be one of type: (list, tuple, pandas.DataFrame)') return self
21.4
99
0.498626
207
1,819
4.323672
0.352657
0.078212
0.040223
0.06257
0
0
0
0
0
0
0
0.001641
0.329852
1,819
84
100
21.654762
0.732568
0.102804
0
0.04
0
0
0.092032
0
0
0
0
0
0.02
1
0.24
false
0
0.04
0.14
0.54
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
63812e727bc4f343e752abde368afd8133a49c71
58,222
py
Python
malaya/path/__init__.py
illaiza115/malaya
002a885db0681ebf056f53b3b1b11fd99687eef1
[ "MIT" ]
1
2021-01-06T07:15:41.000Z
2021-01-06T07:15:41.000Z
malaya/path/__init__.py
illaiza115/malaya
002a885db0681ebf056f53b3b1b11fd99687eef1
[ "MIT" ]
null
null
null
malaya/path/__init__.py
illaiza115/malaya
002a885db0681ebf056f53b3b1b11fd99687eef1
[ "MIT" ]
null
null
null
from malaya import home MALAY_TEXT = home + '/dictionary/malay-text.txt' MALAY_TEXT_200K = home + '/dictionary-200k/malay-text.txt' # sorted based on modules, started from augmentation until toxic PATH_AUGMENTATION = { 'synonym': { 'model': home + '/synonym/synonym0.json', 'model2': home + '/synonym/synonym1.json', 'version': 'v35', } } S3_PATH_AUGMENTATION = { 'synonym': { 'model': 'https://raw.githubusercontent.com/huseinzol05/Malaya-Dataset/master/dictionary/synonym/synonym0.json', 'model2': 'https://raw.githubusercontent.com/huseinzol05/Malaya-Dataset/master/dictionary/synonym/synonym1.json', } } PATH_CONSTITUENCY = { 'bert': { 'model': home + '/constituency/bert/base/model.pb', 'quantized': home + '/constituency/bert/base/quantized/model.pb', 'dictionary': home + '/constituency/bert/base/vocab.json', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v38', }, 'tiny-bert': { 'model': home + '/constituency/bert/tiny/model.pb', 'quantized': home + '/constituency/bert/tiny/quantized/model.pb', 'dictionary': home + '/constituency/bert/tiny/vocab.json', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v38', }, 'albert': { 'model': home + '/constituency/albert/base/model.pb', 'quantized': home + '/constituency/albert/base/quantized/model.pb', 'dictionary': home + '/constituency/albert/base/vocab.json', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v38', }, 'tiny-albert': { 'model': home + '/constituency/albert/tiny/model.pb', 'quantized': home + '/constituency/albert/tiny/quantized/model.pb', 'dictionary': home + '/constituency/albert/tiny/vocab.json', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v38', }, 'xlnet': { 'model': home + '/constituency/xlnet/base/model.pb', 'quantized': home + 
'/constituency/xlnet/base/quantized/model.pb', 'dictionary': home + '/constituency/xlnet/base/vocab.json', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'version': 'v38', }, } S3_PATH_CONSTITUENCY = { 'bert': { 'model': 'v38/constituency/bert-base.pb', 'quantized': 'v40/constituency/bert-base.pb.quantized', 'dictionary': 'v38/constituency/vocab-bert-base.json', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'tiny-bert': { 'model': 'v38/constituency/tiny-bert.pb', 'quantized': 'v40/constituency/tiny-bert.pb.quantized', 'dictionary': 'v38/constituency/vocab-tiny-bert.json', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'albert': { 'model': 'v38/constituency/albert-base.pb', 'quantized': 'v40/constituency/albert-base.pb.quantized', 'dictionary': 'v38/constituency/vocab-albert-base.json', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'tiny-albert': { 'model': 'v38/constituency/albert-tiny.pb', 'quantized': 'v40/constituency/albert-tiny.pb.quantized', 'dictionary': 'v38/constituency/vocab-albert-tiny.json', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'xlnet': { 'model': 'v40/constituency/xlnet-base.pb', 'quantized': 'v40/constituency/xlnet-base.pb.quantized', 'dictionary': 'v40/constituency/vocab-xlnet-base.json', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, } PATH_DEPENDENCY = { 'bert': { 'model': home + '/dependency/bert/base/model.pb', 'quantized': home + '/dependency/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'tiny-bert': { 'model': home + '/dependency/bert/tiny/model.pb', 'quantized': home + '/dependency/bert/tiny/quantized/model.pb', 'vocab': home + 
'/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'albert': { 'model': home + '/dependency/albert/base/model.pb', 'quantized': home + '/dependency/albert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v34', }, 'tiny-albert': { 'model': home + '/dependency/albert/tiny/model.pb', 'quantized': home + '/dependency/albert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v34', }, 'xlnet': { 'model': home + '/dependency/xlnet/base/model.pb', 'quantized': home + '/dependency/xlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'version': 'v34', }, 'alxlnet': { 'model': home + '/dependency/alxlnet/base/model.pb', 'quantized': home + '/dependency/alxlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'version': 'v34', }, } S3_PATH_DEPENDENCY = { 'bert': { 'model': 'v34/dependency/bert-base-dependency.pb', 'quantized': 'v40/dependency/bert-base-dependency.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'tiny-bert': { 'model': 'v34/dependency/tiny-bert-dependency.pb', 'quantized': 'v40/dependency/tiny-bert-dependency.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'albert': { 'model': 'v34/dependency/albert-base-dependency.pb', 'quantized': 'v40/dependency/albert-base-dependency.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'tiny-albert': { 'model': 'v34/dependency/albert-tiny-dependency.pb', 'quantized': 'v40/dependency/albert-tiny-dependency.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 
'tokenizer/sp10m.cased.v10.model', }, 'xlnet': { 'model': 'v34/dependency/xlnet-base-dependency.pb', 'quantized': 'v40/dependency/xlnet-base-dependency.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, 'alxlnet': { 'model': 'v34/dependency/alxlnet-base-dependency.pb', 'quantized': 'v34/dependency/alxlnet-base-dependency.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, } PATH_EMOTION = { 'multinomial': { 'model': home + '/emotion/multinomial/multinomial.pkl', 'vector': home + '/emotion/multinomial/tfidf.pkl', 'bpe': home + '/emotion/multinomial/bpe.model', 'version': 'v34', }, 'bert': { 'model': home + '/emotion/bert/base/model.pb', 'quantized': home + '/emotion/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'tiny-bert': { 'model': home + '/emotion/bert/tiny/model.pb', 'quantized': home + '/emotion/bert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'albert': { 'model': home + '/emotion/albert/base/model.pb', 'quantized': home + '/emotion/albert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v34', }, 'tiny-albert': { 'model': home + '/emotion/albert/tiny/model.pb', 'quantized': home + '/emotion/albert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v34', }, 'xlnet': { 'model': home + '/emotion/xlnet/base/model.pb', 'quantized': home + '/emotion/xlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'version': 'v34', }, 'alxlnet': { 'model': home + '/emotion/alxlnet/base/model.pb', 'quantized': home + 
'/emotion/alxlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'version': 'v34', }, } S3_PATH_EMOTION = { 'multinomial': { 'model': 'v34/emotion/multinomial.pkl', 'vector': 'v34/emotion/tfidf.pkl', 'bpe': 'v34/emotion/bpe.model', }, 'bert': { 'model': 'v34/emotion/bert-base-emotion.pb', 'quantized': 'v40/emotion/bert-base-emotion.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'tiny-bert': { 'model': 'v34/emotion/tiny-bert-emotion.pb', 'quantized': 'v40/emotion/tiny-bert-emotion.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'albert': { 'model': 'v34/emotion/albert-base-emotion.pb', 'quantized': 'v40/emotion/albert-base-emotion.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'tiny-albert': { 'model': 'v34/emotion/albert-tiny-emotion.pb', 'quantized': 'v40/emotion/albert-tiny-emotion.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'xlnet': { 'model': 'v34/emotion/xlnet-base-emotion.pb', 'quantized': 'v40/emotion/xlnet-base-emotion.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, 'alxlnet': { 'model': 'v34/emotion/alxlnet-base-emotion.pb', 'quantized': 'v40/emotion/alxlnet-base-emotion.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, } PATH_ENTITIES = { 'bert': { 'model': home + '/entity/bert/base/model.pb', 'quantized': home + '/entity/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'setting': home + '/entity/dictionary-entities.json', 'version': 'v34', }, 'tiny-bert': { 'model': home + '/entity/bert/tiny/model.pb', 'quantized': home + 
'/entity/bert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'setting': home + '/entity/dictionary-entities.json', 'version': 'v34', }, 'albert': { 'model': home + '/entity/albert/base/model.pb', 'quantized': home + '/entity/albert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'setting': home + '/entity/dictionary-entities.json', 'version': 'v34', }, 'tiny-albert': { 'model': home + '/entity/albert/tiny/model.pb', 'quantized': home + '/entity/albert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'setting': home + '/entity/dictionary-entities.json', 'version': 'v34', }, 'xlnet': { 'model': home + '/entity/xlnet/base/model.pb', 'quantized': home + '/entity/xlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'setting': home + '/entity/dictionary-entities.json', 'version': 'v34', }, 'alxlnet': { 'model': home + '/entity/alxlnet/base/model.pb', 'quantized': home + '/entity/alxlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'setting': home + '/entity/dictionary-entities.json', 'version': 'v34', }, } S3_PATH_ENTITIES = { 'bert': { 'model': 'v34/entity/bert-base-entity.pb', 'quantized': 'v40/entity/bert-base-entity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', 'setting': 'bert-bahasa/dictionary-entities.json', }, 'tiny-bert': { 'model': 'v34/entity/tiny-bert-entity.pb', 'quantized': 'v40/entity/tiny-bert-entity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', 'setting': 'bert-bahasa/dictionary-entities.json', }, 'albert': { 'model': 'v34/entity/albert-base-entity.pb', 
'quantized': 'v40/entity/albert-base-entity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', 'setting': 'bert-bahasa/dictionary-entities.json', }, 'tiny-albert': { 'model': 'v34/entity/albert-tiny-entity.pb', 'quantized': 'v40/entity/albert-tiny-entity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', 'setting': 'bert-bahasa/dictionary-entities.json', }, 'xlnet': { 'model': 'v34/entity/xlnet-base-entity.pb', 'quantized': 'v40/entity/xlnet-base-entity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', 'setting': 'bert-bahasa/dictionary-entities.json', }, 'alxlnet': { 'model': 'v34/entity/alxlnet-base-entity.pb', 'quantized': 'v40/entity/alxlnet-base-entity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', 'setting': 'bert-bahasa/dictionary-entities.json', }, } PATH_GENERATOR = { 't5-compressed': { 'base': { 'path': home + '/generator/t5-compressed/base', 'directory': home + '/generator/t5-compressed/base/model/', 'model': { 'model': home + '/generator/t5-compressed/base/generator-t5-base.tar.gz', 'version': 'v35', }, }, 'small': { 'path': home + '/generator/t5-compressed/small', 'directory': home + '/generator/t5-compressed/small/model/', 'model': { 'model': home + '/generator/t5-compressed/small/generator-t5-small.tar.gz', 'version': 'v35', }, }, }, 't5': { 'base': { 'model': home + '/generator/t5/base/model.pb', 'quantized': home + '/generator/t5/base/quantized/model.pb', 'version': 'v38', }, 'small': { 'model': home + '/generator/t5/small/model.pb', 'quantized': home + '/generator/t5/small/quantized/model.pb', 'version': 'v38', }, }, } S3_PATH_GENERATOR = { 't5-compressed': { 'base': {'model': 'v35/generator/sample-generator-t5-base.tar.gz'}, 'small': {'model': 'v35/generator/sample-generator-t5-small.tar.gz'}, }, 't5': { 'base': { 'model': 
'v38/generator/base.pb', 'quantized': 'v40/generator/base.pb.quantized', }, 'small': { 'model': 'v38/generator/small.pb', 'quantized': 'v40/generator/small.pb.quantized', }, }, } PATH_LANG_DETECTION = { 'fasttext-original': { 'model': home + '/language-detection/fasttext-original/fasstext.bin', 'version': 'v34', }, 'fasttext-quantized': { 'model': home + '/language-detection/fasttext-quantized/fasstext.tfz', 'version': 'v34', }, 'deep': { 'model': home + '/language-detection/deep/model.ckpt.data-00000-of-00001', 'index': home + '/language-detection/deep/model.ckpt.index', 'meta': home + '/language-detection/deep/model.ckpt.meta', 'vector': home + '/language-detection/deep/vectorizer-language-detection.pkl', 'bpe': home + '/language-detection/deep/bpe.model', 'version': 'v34', }, } S3_PATH_LANG_DETECTION = { 'fasttext-original': { 'model': 'v34/language-detection/fasttext-malaya.bin' }, 'fasttext-quantized': { 'model': 'v34/language-detection/fasttext-malaya.ftz' }, 'deep': { 'model': 'v34/language-detection/model.ckpt.data-00000-of-00001', 'index': 'v34/language-detection/model.ckpt.index', 'meta': 'v34/language-detection/model.ckpt.meta', 'vector': 'v34/language-detection/bow-language-detection.pkl', 'bpe': 'v34/language-detection/language-detection.model', }, } PATH_NGRAM = { 1: { 'model': home + '/preprocessing/ngram1/bm_1grams.json', 'version': 'v28', }, 2: { 'model': home + '/preprocessing/ngram2/bm_2grams.json', 'version': 'v23', }, 'symspell': { 'model': home + '/preprocessing/symspell/bm_1grams.txt', 'version': 'v28', }, 'sentencepiece': { 'vocab': home + '/preprocessing/sentencepiece/sp10m.cased.v4.vocab', 'model': home + '/preprocessing/sentencepiece/sp10m.cased.v4.model', 'version': 'v31', }, } S3_PATH_NGRAM = { 1: {'model': 'v27/preprocessing/bm_1grams.json'}, 2: {'model': 'v23/preprocessing/bm_2grams.json'}, 'symspell': {'model': 'v27/preprocessing/bm_1grams.txt'}, 'sentencepiece': { 'vocab': 'bert-bahasa/sp10m.cased.v4.vocab', 'model': 
'bert-bahasa/sp10m.cased.v4.model', }, } PATH_NSFW = { 'lexicon': {'model': home + '/nsfw/lexicon.json', 'version': 'v39'}, 'multinomial': { 'model': home + '/nsfw/multinomial/multinomial.pkl', 'vector': home + '/nsfw/multinomial/tfidf.pkl', 'bpe': home + '/nsfw/multinomial/bpe.model', 'version': 'v39', }, } S3_PATH_NSFW = { 'lexicon': {'model': 'v39/nsfw/nsfw-corpus.json'}, 'multinomial': { 'model': 'v39/nsfw/multinomial-nsfw.pkl', 'vector': 'v39/nsfw/tfidf-nsfw.pkl', 'bpe': 'v39/nsfw/nsfw.model', }, } PATH_PARAPHRASE = { 't5-compressed': { 'base': { 'path': home + '/paraphrase/t5-compressed/base', 'directory': home + '/paraphrase/t5-compressed/base/model/', 'model': { 'model': home + '/paraphrase/t5-compressed/base/paraphrase-t5-base.tar.gz', 'version': 'v36', }, }, 'small': { 'path': home + '/paraphrase/t5-compressed/small', 'directory': home + '/paraphrase/t5-compressed/small/model/', 'model': { 'model': home + '/paraphrase/t5-compressed/small/paraphrase-t5-small.tar.gz', 'version': 'v36', }, }, }, 't5': { 'base': { 'model': home + '/paraphrase/t5/base/model.pb', 'quantized': home + '/paraphrase/t5/base/quantized/model.pb', 'version': 'v38', }, 'small': { 'model': home + '/paraphrase/t5/small/model.pb', 'quantized': home + '/paraphrase/t5/small/quantized/model.pb', 'version': 'v38', }, }, 'transformer': { 'base': { 'model': home + '/paraphrase/transformer/base/model.pb', 'quantized': home + '/paraphrase/transformer/base/quantized/model.pb', 'vocab': home + '/paraphrase/sp10m.cased.t5.model', 'version': 'v39', }, 'small': { 'model': home + '/paraphrase/transformer/small/model.pb', 'quantized': home + '/paraphrase/transformer/small/quantized/model.pb', 'vocab': home + '/paraphrase/sp10m.cased.t5.model', 'version': 'v39', }, }, } S3_PATH_PARAPHRASE = { 't5-compressed': { 'base': {'model': 'v36/paraphrase/paraphrase-t5-base.tar.gz'}, 'small': {'model': 'v36/paraphrase/paraphrase-t5-small.tar.gz'}, }, 't5': { 'base': { 'model': 'v38/paraphrase/base.pb', 'quantized': 
'v40/paraphrase/base.pb.t5.quantized', }, 'small': { 'model': 'v38/paraphrase/small.pb', 'quantized': 'v40/paraphrase/small.pb.t5.quantized', }, }, 'transformer': { 'base': { 'model': 'v39/paraphrase/base.pb', 'quantized': 'v40/paraphrase/base.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.t5.model', }, 'small': { 'model': 'v39/paraphrase/small.pb', 'quantized': 'v40/paraphrase/small.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.t5.model', }, }, } PATH_POS = { 'bert': { 'model': home + '/pos/bert/base/model.pb', 'quantized': home + '/pos/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'setting': home + '/pos/dictionary-pos.json', 'version': 'v34', }, 'tiny-bert': { 'model': home + '/pos/bert/tiny/model.pb', 'quantized': home + '/pos/bert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'setting': home + '/pos/dictionary-pos.json', 'version': 'v34', }, 'albert': { 'model': home + '/pos/albert/base/model.pb', 'quantized': home + '/pos/albert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'setting': home + '/pos/dictionary-pos.json', 'version': 'v34', }, 'tiny-albert': { 'model': home + '/pos/albert/tiny/model.pb', 'quantized': home + '/pos/albert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'setting': home + '/pos/dictionary-pos.json', 'version': 'v34', }, 'xlnet': { 'model': home + '/pos/xlnet/base/model.pb', 'quantized': home + '/pos/xlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'setting': home + '/pos/dictionary-pos.json', 'version': 'v34', }, 'alxlnet': { 'model': home + '/pos/alxlnet/base/model.pb', 'quantized': home + '/pos/alxlnet/base/quantized/model.pb', 'vocab': 
home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'setting': home + '/pos/dictionary-pos.json', 'version': 'v34', }, } S3_PATH_POS = { 'bert': { 'model': 'v34/pos/bert-base-pos.pb', 'quantized': 'v40/pos/bert-base-pos.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', 'setting': 'bert-bahasa/dictionary-pos.json', }, 'tiny-bert': { 'model': 'v34/pos/tiny-bert-pos.pb', 'quantized': 'v40/pos/tiny-bert-pos.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', 'setting': 'bert-bahasa/dictionary-pos.json', }, 'albert': { 'model': 'v34/pos/albert-base-pos.pb', 'quantized': 'v40/pos/albert-base-pos.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', 'setting': 'bert-bahasa/dictionary-pos.json', }, 'tiny-albert': { 'model': 'v34/pos/albert-tiny-pos.pb', 'quantized': 'v40/pos/albert-tiny-pos.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', 'setting': 'bert-bahasa/dictionary-pos.json', }, 'xlnet': { 'model': 'v34/pos/xlnet-base-pos.pb', 'quantized': 'v40/pos/xlnet-base-pos.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', 'setting': 'bert-bahasa/dictionary-pos.json', }, 'alxlnet': { 'model': 'v34/pos/alxlnet-base-pos.pb', 'quantized': 'v40/pos/alxlnet-base-pos.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', 'setting': 'bert-bahasa/dictionary-pos.json', }, } PATH_PREPROCESSING = { 1: { 'model': home + '/preprocessing/count1/1counts_1grams.json', 'version': 'v23', }, 2: { 'model': home + '/preprocessing/count2/counts_2grams.json', 'version': 'v23', }, 'english-malay': { 'model': home + '/preprocessing/english-malay/english-malay-200k.json', 'version': 'v23', }, } S3_PATH_PREPROCESSING = { 1: {'model': 
'v23/preprocessing/counts_1grams.json'}, 2: {'model': 'v23/preprocessing/counts_2grams.json'}, 'english-malay': {'model': 'v23/preprocessing/english-malay-200k.json'}, } PATH_RELEVANCY = { 'bert': { 'model': home + '/relevancy/bert/base/model.pb', 'quantized': home + '/relevancy/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v40', }, 'tiny-bert': { 'model': home + '/relevancy/bert/tiny/model.pb', 'quantized': home + '/relevancy/bert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v40', }, 'albert': { 'model': home + '/relevancy/albert/base/model.pb', 'quantized': home + '/relevancy/albert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v40', }, 'tiny-albert': { 'model': home + '/relevancy/albert/tiny/model.pb', 'quantized': home + '/relevancy/albert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v40', }, 'xlnet': { 'model': home + '/relevancy/xlnet/base/model.pb', 'quantized': home + '/relevancy/xlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'version': 'v40', }, 'alxlnet': { 'model': home + '/relevancy/alxlnet/base/model.pb', 'quantized': home + '/relevancy/alxlnet/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v9.vocab', 'tokenizer': home + '/bert/sp10m.cased.v9.model', 'version': 'v40', }, } S3_PATH_RELEVANCY = { 'bert': { 'model': 'v40/relevancy/bert-base-relevancy.pb', 'quantized': 'v40/relevancy/bert-base-relevancy.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'tiny-bert': { 'model': 'v40/relevancy/tiny-bert-relevancy.pb', 'quantized': 
'v40/relevancy/tiny-bert-relevancy.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'albert': { 'model': 'v40/relevancy/albert-base-relevancy.pb', 'quantized': 'v40/relevancy/albert-base-relevancy.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'tiny-albert': { 'model': 'v40/relevancy/albert-tiny-relevancy.pb', 'quantized': 'v40/relevancy/albert-tiny-relevancy.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'xlnet': { 'model': 'v40/relevancy/xlnet-base-relevancy.pb', 'quantized': 'v40/relevancy/xlnet-base-relevancy.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, 'alxlnet': { 'model': 'v40/relevancy/alxlnet-base-relevancy.pb', 'quantized': 'v40/relevancy/alxlnet-base-relevancy.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, } PATH_SEGMENTATION = { 'base': { 'model': home + '/segmentation/base.pb', 'quantized': home + '/segmentation/quantized/base.pb', 'vocab': home + '/segmentation/vocab.yttm', 'version': 'v40', }, 'small': { 'model': home + '/segmentation/small.pb', 'quantized': home + '/segmentation/quantized/small.pb', 'vocab': home + '/segmentation/vocab.yttm', 'version': 'v40', }, } S3_PATH_SEGMENTATION = { 'base': { 'model': 'v40/segmentation/base.pb', 'quantized': 'v40/segmentation/base.pb.quantized', 'vocab': 'tokenizer/segmentation.yttm', }, 'small': { 'model': 'v40/segmentation/small.pb', 'quantized': 'v40/segmentation/small.pb.quantized', 'vocab': 'tokenizer/segmentation.yttm', }, } PATH_SENTIMENT = { 'multinomial': { 'model': home + '/sentiment/multinomial/multinomial.pkl', 'vector': home + '/sentiment/multinomial/tfidf.pkl', 'bpe': home + '/sentiment/multinomial/bpe.model', 'version': 'v34', }, 'bert': { 'model': home + '/sentiment/bert/base/model.pb', 
'quantized': home + '/sentiment/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'tiny-bert': { 'model': home + '/sentiment/bert/tiny/model.pb', 'quantized': home + '/sentiment/bert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'albert': { 'model': home + '/sentiment/albert/base/model.pb', 'quantized': home + '/sentiment/albert/base/quantized/model.pb', 'vocab': home + '/albert/sp10m.cased.v10.vocab', 'tokenizer': home + '/albert/sp10m.cased.v10.model', 'version': 'v34', }, 'tiny-albert': { 'model': home + '/sentiment/albert/tiny/model.pb', 'quantized': home + '/sentiment/albert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'xlnet': { 'model': home + '/sentiment/xlnet/base/model.pb', 'quantized': home + '/sentiment/xlnet/base/quantized/model.pb', 'vocab': home + '/xlnet/sp10m.cased.v9.vocab', 'tokenizer': home + '/xlnet/sp10m.cased.v9.model', 'version': 'v34', }, 'alxlnet': { 'model': home + '/sentiment/alxlnet/base/model.pb', 'quantized': home + '/sentiment/alxlnet/base/quantized/model.pb', 'vocab': home + '/xlnet/sp10m.cased.v9.vocab', 'tokenizer': home + '/xlnet/sp10m.cased.v9.model', 'version': 'v34', }, } S3_PATH_SENTIMENT = { 'multinomial': { 'model': 'v34/sentiment/multinomial.pkl', 'vector': 'v34/sentiment/tfidf.pkl', 'bpe': 'v34/sentiment/bpe.model', }, 'bert': { 'model': 'v34/sentiment/bert-base-sentiment.pb', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', 'quantized': 'v40/sentiment/bert-base-sentiment.pb.quantized', }, 'tiny-bert': { 'model': 'v34/sentiment/tiny-bert-sentiment.pb', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', 'quantized': 
'v40/sentiment/tiny-bert-sentiment.pb.quantized', }, 'albert': { 'model': 'v34/sentiment/albert-base-sentiment.pb', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', 'quantized': 'v40/sentiment/albert-base-sentiment.pb.quantized', }, 'tiny-albert': { 'model': 'v34/sentiment/albert-tiny-sentiment.pb', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', 'quantized': 'v40/sentiment/albert-tiny-sentiment.pb.quantized', }, 'xlnet': { 'model': 'v34/sentiment/xlnet-base-sentiment.pb', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', 'quantized': 'v40/sentiment/xlnet-base-sentiment.pb.quantized', }, 'alxlnet': { 'model': 'v34/sentiment/alxlnet-base-sentiment.pb', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', 'quantized': 'v40/sentiment/alxlnet-base-sentiment.pb.quantized', }, } PATH_SIMILARITY = { 'bert': { 'model': home + '/similarity/bert/base/model.pb', 'quantized': home + '/similarity/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v36', }, 'tiny-bert': { 'model': home + '/similarity/bert/tiny/model.pb', 'quantized': home + '/similarity/bert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v36', }, 'albert': { 'model': home + '/similarity/albert/base/model.pb', 'quantized': home + '/similarity/albert/base/quantized/model.pb', 'vocab': home + '/albert/sp10m.cased.v10.vocab', 'tokenizer': home + '/albert/sp10m.cased.v10.model', 'version': 'v36', }, 'tiny-albert': { 'model': home + '/similarity/albert/tiny/model.pb', 'quantized': home + '/similarity/albert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v36', }, 'xlnet': { 'model': home + 
'/similarity/xlnet/base/model.pb', 'quantized': home + '/similarity/xlnet/base/quantized/model.pb', 'vocab': home + '/xlnet/sp10m.cased.v9.vocab', 'tokenizer': home + '/xlnet/sp10m.cased.v9.model', 'version': 'v36', }, 'alxlnet': { 'model': home + '/similarity/alxlnet/base/model.pb', 'quantized': home + '/similarity/alxlnet/base/quantized/model.pb', 'vocab': home + '/xlnet/sp10m.cased.v9.vocab', 'tokenizer': home + '/xlnet/sp10m.cased.v9.model', 'version': 'v36', }, } S3_PATH_SIMILARITY = { 'bert': { 'model': 'v36/similarity/bert-base-similarity.pb', 'quantized': 'v40/similarity/bert-base-similarity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'tiny-bert': { 'model': 'v36/similarity/tiny-bert-similarity.pb', 'quantized': 'v40/similarity/tiny-bert-similarity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'albert': { 'model': 'v36/similarity/albert-base-similarity.pb', 'quantized': 'v40/similarity/albert-base-similarity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'tiny-albert': { 'model': 'v36/similarity/albert-tiny-similarity.pb', 'quantized': 'v40/similarity/albert-tiny-similarity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'xlnet': { 'model': 'v36/similarity/xlnet-base-similarity.pb', 'quantized': 'v40/similarity/xlnet-base-similarity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, 'alxlnet': { 'model': 'v36/similarity/alxlnet-base-similarity.pb', 'quantized': 'v40/similarity/alxlnet-base-similarity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, } PATH_STEM = { 'deep': { 'model': home + '/stem/lstm/model.pb', 'quantized': home + '/stem/lstm/quantized/model.pb', 'bpe': home + 
'/stem/lstm/bpe.model', 'version': 'v34', } } S3_PATH_STEM = { 'deep': { 'model': 'v34/stem/model.pb', 'quantized': 'v40/stem/model.pb.quantized', 'bpe': 'v34/stem/bpe.model', } } PATH_SUBJECTIVE = { 'multinomial': { 'model': home + '/subjective/multinomial/multinomial.pkl', 'vector': home + '/subjective/multinomial/tfidf.pkl', 'bpe': home + '/subjective/multinomial/bpe.model', 'version': 'v34', }, 'bert': { 'model': home + '/subjective/bert/base/model.pb', 'quantized': home + '/subjective/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'tiny-bert': { 'model': home + '/subjective/bert/tiny/model.pb', 'quantized': home + '/subjective/bert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'albert': { 'model': home + '/subjective/albert/base/model.pb', 'quantized': home + '/subjective/albert/base/quantized/model.pb', 'vocab': home + '/albert/sp10m.cased.v10.vocab', 'tokenizer': home + '/albert/sp10m.cased.v10.model', 'version': 'v34', }, 'tiny-albert': { 'model': home + '/subjective/albert/tiny/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'xlnet': { 'model': home + '/subjective/xlnet/base/model.pb', 'quantized': home + '/subjective/xlnet/base/quantized/model.pb', 'vocab': home + '/xlnet/sp10m.cased.v9.vocab', 'tokenizer': home + '/xlnet/sp10m.cased.v9.model', 'version': 'v34', }, 'alxlnet': { 'model': home + '/subjective/alxlnet/base/model.pb', 'quantized': home + '/subjective/alxlnet/base/quantized/model.pb', 'vocab': home + '/xlnet/sp10m.cased.v9.vocab', 'tokenizer': home + '/xlnet/sp10m.cased.v9.model', 'version': 'v34', }, } S3_PATH_SUBJECTIVE = { 'multinomial': { 'model': 'v34/subjective/multinomial.pkl', 'vector': 'v34/subjective/tfidf.pkl', 'bpe': 'v34/subjective/bpe.model', }, 
'bert': { 'model': 'v34/subjective/bert-base-subjective.pb', 'quantized': 'v40/subjective/bert-base-subjective.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'tiny-bert': { 'model': 'v34/subjective/tiny-bert-subjective.pb', 'quantized': 'v40/subjective/tiny-bert-subjective.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'albert': { 'model': 'v34/subjective/albert-base-subjective.pb', 'quantized': 'v40/subjective/albert-base-subjective.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'tiny-albert': { 'model': 'v34/subjective/albert-tiny-subjective.pb', 'quantized': 'v40/subjective/albert-tiny-subjective.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'xlnet': { 'model': 'v34/subjective/xlnet-base-subjective.pb', 'quantized': 'v40/subjective/xlnet-base-subjective.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, 'alxlnet': { 'model': 'v34/subjective/alxlnet-base-subjective.pb', 'quantized': 'v40/subjective/alxlnet-base-subjective.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, } PATH_SUMMARIZE = { 'news': { 'model': home + '/summarize/summary-news.pb', 'setting': home + '/summarize/summary-news.json', 'version': 'v13', }, 'wiki': { 'model': home + '/summarize/summary-wiki.pb', 'setting': home + '/summarize/summary-wiki.json', 'version': 'v13', }, 't5-compressed': { 'base': { 'path': home + '/summarize/t5-compressed/base', 'directory': home + '/summarize/t5-compressed/base/model/', 'model': { 'model': home + '/summarize/t5-compressed/base/summarize-t5-base.tar.gz', 'version': 'v35', }, }, 'small': { 'path': home + '/summarize/t5-compressed/small', 'directory': home + 
'/summarize/t5-compressed/base/model/', 'model': { 'model': home + '/summarize/t5-compressed/small/summarize-t5-base.tar.gz', 'version': 'v35', }, }, }, 't5': { 'base': { 'model': home + '/summarize/t5/base/model.pb', 'quantized': home + '/summarize/t5/base/quantized/model.pb', 'version': 'v38', }, 'small': { 'model': home + '/summarize/t5/small/model.pb', 'quantized': home + '/summarize/t5/small/quantized/model.pb', 'version': 'v38', }, }, 'transformer': { 'base': { 'model': home + '/summarize/transformer/base/model.pb', 'quantized': home + '/summarize/transformer/base/quantized/model.pb', 'vocab': home + '/summarize/sp10m.cased.t5.model', 'version': 'v39', }, 'small': { 'model': home + '/summarize/transformer/small/model.pb', 'quantized': home + '/summarize/transformer/small/quantized/model.pb', 'vocab': home + '/summarize/sp10m.cased.t5.model', 'version': 'v39', }, }, } S3_PATH_SUMMARIZE = { 'news': { 'model': 'v13/summarize/summary-news.pb', 'setting': 'v13/summarize/summary-news.json', }, 'wiki': { 'model': 'v13/summarize/summary-wiki.pb', 'setting': 'v13/summarize/summary-wiki.json', }, 't5-compressed': { 'base': {'model': 'v35/summarize/argmax-summarize-t5-base.tar.gz'}, 'small': {'model': 'v35/summarize/argmax-summarize-t5-small.tar.gz'}, }, 't5': { 'base': { 'model': 'v38/summarize/base.pb', 'quantized': 'v40/summarize/base.pb.quantized', }, 'small': { 'model': 'v38/summarize/small.pb', 'quantized': 'v40/summarize/small.pb.quantized', }, }, 'transformer': { 'base': { 'model': 'v39/summarization/base.pb', 'quantized': 'v40/summarize/transformer-base.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.t5.model', }, 'small': { 'model': 'v39/summarization/small.pb', 'quantized': 'v40/summarize/transformer-small.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.t5.model', }, }, } PATH_TOXIC = { 'multinomial': { 'model': home + '/toxicity/multinomial/multinomial.pkl', 'vector': home + '/toxicity/multinomial/tfidf.pkl', 'bpe': home + '/toxicity/multinomial/bpe.model', 
'version': 'v34', }, 'bert': { 'model': home + '/toxicity/bert/base/model.pb', 'quantized': home + '/toxicity/bert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'tiny-bert': { 'model': home + '/toxicity/bert/tiny/model.pb', 'quantized': home + '/toxicity/bert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.bert.vocab', 'tokenizer': home + '/bert/sp10m.cased.bert.model', 'version': 'v34', }, 'albert': { 'model': home + '/toxicity/albert/base/model.pb', 'quantized': home + '/toxicity/albert/base/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v34', }, 'tiny-albert': { 'model': home + '/toxicity/albert/tiny/model.pb', 'quantized': home + '/toxicity/albert/tiny/quantized/model.pb', 'vocab': home + '/bert/sp10m.cased.v10.vocab', 'tokenizer': home + '/bert/sp10m.cased.v10.model', 'version': 'v34', }, 'xlnet': { 'model': home + '/toxicity/xlnet/base/model.pb', 'quantized': home + '/toxicity/xlnet/base/quantized/model.pb', 'vocab': home + '/xlnet/sp10m.cased.v9.vocab', 'tokenizer': home + '/xlnet/sp10m.cased.v9.model', 'version': 'v34', }, 'alxlnet': { 'model': home + '/toxicity/alxlnet/base/model.pb', 'quantized': home + '/toxicity/alxlnet/base/quantized/model.pb', 'vocab': home + '/alxlnet/sp10m.cased.v9.vocab', 'tokenizer': home + '/alxlnet/sp10m.cased.v9.model', 'version': 'v34', }, } S3_PATH_TOXIC = { 'multinomial': { 'model': 'v34/toxicity/multinomial.pkl', 'vector': 'v34/toxicity/tfidf.pkl', 'bpe': 'v34/toxicity/bpe.model', }, 'bert': { 'model': 'v34/toxicity/bert-base-toxicity.pb', 'quantized': 'v40/toxicity/bert-base-toxicity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'tiny-bert': { 'model': 'v34/toxicity/tiny-bert-toxicity.pb', 'quantized': 'v40/toxicity/tiny-bert-toxicity.pb.quantized', 'vocab': 
'tokenizer/sp10m.cased.bert.vocab', 'tokenizer': 'tokenizer/sp10m.cased.bert.model', }, 'albert': { 'model': 'v34/toxicity/albert-base-toxicity.pb', 'quantized': 'v40/toxicity/albert-base-toxicity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'tiny-albert': { 'model': 'v34/toxicity/albert-tiny-toxicity.pb', 'quantized': 'v40/toxicity/albert-tiny-toxicity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v10.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v10.model', }, 'xlnet': { 'model': 'v34/toxicity/xlnet-base-toxicity.pb', 'quantized': 'v40/toxicity/xlnet-base-toxicity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, 'alxlnet': { 'model': 'v34/toxicity/alxlnet-base-toxicity.pb', 'quantized': 'v40/toxicity/alxlnet-base-toxicity.pb.quantized', 'vocab': 'tokenizer/sp10m.cased.v9.vocab', 'tokenizer': 'tokenizer/sp10m.cased.v9.model', }, } PATH_TRANSLATION = { 'ms-en': { 'base': { 'model': home + '/translation/ms-en/base/model.pb', 'quantized': home + '/translation/ms-en/base/quantized/model.pb', 'vocab': home + '/translation/ms-en/base/vocab.subwords', 'version': 'v37', }, 'large': { 'model': home + '/translation/ms-en/large/model.pb', 'quantized': home + '/translation/ms-en/large/quantized/model.pb', 'vocab': home + '/translation/ms-en/large/vocab.subwords', 'version': 'v37', }, 'small': { 'model': home + '/translation/ms-en/small/model.pb', 'quantized': home + '/translation/ms-en/small/quantized/model.pb', 'vocab': home + '/translation/ms-en/small/vocab.subwords', 'version': 'v37', }, }, 'en-ms': { 'base': { 'model': home + '/translation/en-ms/base/model.pb', 'quantized': home + '/translation/en-ms/base/quantized/model.pb', 'vocab': home + '/translation/en-ms/base/vocab.subwords', 'version': 'v38', }, 'large': { 'model': home + '/translation/en-ms/large/model.pb', 'quantized': home + '/translation/en-ms/large/quantized/model.pb', 'vocab': home + 
'/translation/en-ms/large/vocab.subwords', 'version': 'v38', }, 'small': { 'model': home + '/translation/en-ms/small/model.pb', 'quantized': home + '/translation/en-ms/small/quantized/model.pb', 'vocab': home + '/translation/en-ms/small/vocab.subwords', 'version': 'v38', }, }, } S3_PATH_TRANSLATION = { 'ms-en': { 'base': { 'model': 'v37/translation/ms-en/base-translation.pb', 'quantized': 'v40/translation/ms-en/base-translation.pb.quantized', 'vocab': 'v37/translation/ms-en/vocab.subwords', }, 'large': { 'model': 'v37/translation/ms-en/large-translation.pb', 'quantized': 'v40/translation/ms-en/large-translation.pb.quantized', 'vocab': 'v37/translation/ms-en/vocab.subwords', }, 'small': { 'model': 'v37/translation/ms-en/small-translation.pb', 'quantized': 'v40/translation/ms-en/small-translation.pb.quantized', 'vocab': 'v37/translation/ms-en/vocab.subwords', }, }, 'en-ms': { 'base': { 'model': 'v38/translation/en-ms/base-translation.pb', 'quantized': 'v40/translation/en-ms/base-translation.pb.quantized', 'vocab': 'v38/translation/en-ms/vocab.subwords', }, 'large': { 'model': 'v38/translation/en-ms/large-translation.pb', 'quantized': 'v40/translation/en-ms/large-translation.pb.quantized', 'vocab': 'v38/translation/en-ms/vocab.subwords', }, 'small': { 'model': 'v38/translation/en-ms/small-translation.pb', 'quantized': 'v38/translation/en-ms/small-translation.pb.quantized', 'vocab': 'v38/translation/en-ms/vocab.subwords', }, }, } PATH_TRUE_CASE = { 'base': { 'model': home + '/true-case/base.pb', 'quantized': home + '/true-case/quantized/base.pb', 'vocab': home + '/true-case/vocab.yttm', 'version': 'v39', }, 'small': { 'model': home + '/true-case/small.pb', 'quantized': home + '/true-case/quantized/small.pb', 'vocab': home + '/true-case/vocab.yttm', 'version': 'v39', }, } S3_PATH_TRUE_CASE = { 'base': { 'model': 'v39/true-case/base.pb', 'quantized': 'v40/true-case/base.pb.quantized', 'vocab': 'tokenizer/truecase.yttm', }, 'small': { 'model': 'v39/true-case/small.pb', 
'quantized': 'v40/true-case/small.pb.quantized', 'vocab': 'tokenizer/truecase.yttm', }, } PATH_ELECTRA = { 'electra': { 'path': home + '/electra-model/base', 'directory': home + '/electra-model/base/electra-base/', 'model': { 'model': home + '/electra-model/base/electra-bahasa-base.tar.gz', 'version': 'v34', }, }, 'small-electra': { 'path': home + '/electra-model/small', 'directory': home + '/electra-model/small/electra-small/', 'model': { 'model': home + '/electra-model/small/electra-bahasa-small.tar.gz', 'version': 'v34', }, }, } S3_PATH_ELECTRA = { 'electra': {'model': 'v34/pretrained-model/electra-base.tar.gz'}, 'small-electra': {'model': 'v34/pretrained-model/electra-small.tar.gz'}, } PATH_BERT = { 'bert': { 'path': home + '/bert-model/base', 'directory': home + '/bert-model/base/bert-base-v3/', 'model': { 'model': home + '/bert-model/base/bert-bahasa-base.tar.gz', 'version': 'v34', }, }, 'tiny-bert': { 'path': home + '/bert-model/tiny', 'directory': home + '/bert-model/tiny/tiny-bert-v1/', 'model': { 'model': home + '/bert-model/tiny/tiny-bert-bahasa.tar.gz', 'version': 'v34', }, }, } S3_PATH_BERT = { 'bert': {'model': 'v34/pretrained-model/bert-base.tar.gz'}, 'tiny-bert': {'model': 'v34/pretrained-model/tiny-bert.tar.gz'}, } PATH_ALBERT = { 'albert': { 'path': home + '/albert-model/base', 'directory': home + '/albert-model/base/albert-base/', 'model': { 'model': home + '/albert-model/base/albert-bahasa-base.tar.gz', 'version': 'v34', }, }, 'tiny-albert': { 'path': home + '/albert-model/tiny', 'directory': home + '/albert-model/tiny/albert-tiny/', 'model': { 'model': home + '/albert-model/tiny/albert-bahasa-tiny.tar.gz', 'version': 'v34', }, }, } S3_PATH_ALBERT = { 'albert': {'model': 'v34/pretrained-model/albert-base.tar.gz'}, 'tiny-albert': {'model': 'v34/pretrained-model/albert-tiny.tar.gz'}, } PATH_XLNET = { 'xlnet': { 'path': home + '/xlnet-model/base', 'directory': home + '/xlnet-model/base/xlnet-base/', 'model': { 'model': home + 
'/xlnet-model/base/xlnet-base.tar.gz', 'version': 'v34', }, } } S3_PATH_XLNET = {'xlnet': {'model': 'v34/pretrained-model/xlnet-base.tar.gz'}} PATH_ALXLNET = { 'alxlnet': { 'path': home + '/alxlnet-model/base', 'directory': home + '/alxlnet-model/base/alxlnet-base/', 'model': { 'model': home + '/alxlnet-model/base/alxlnet-base.tar.gz', 'version': 'v34', }, } } S3_PATH_ALXLNET = { 'alxlnet': {'model': 'v34/pretrained-model/alxlnet-base.tar.gz'} } PATH_GPT2 = { '117M': { 'path': home + '/gpt2/117M/', 'directory': home + '/gpt2/117M/gpt2-bahasa-117M/', 'model': { 'model': home + '/gpt2/117M/gpt2-117M.tar.gz', 'version': 'v34', }, }, '345M': { 'path': home + '/gpt2/345M/', 'directory': home + '/gpt2/345M/gpt2-bahasa-345M/', 'model': { 'model': home + '/gpt2/345M/gpt2-345M.tar.gz', 'version': 'v34', }, }, } S3_PATH_GPT2 = { '117M': {'model': 'v34/pretrained-model/gpt2-bahasa-117M.tar.gz'}, '345M': {'model': 'v34/pretrained-model/gpt2-bahasa-345M.tar.gz'}, } PATH_WORDVECTOR = { 'news': { 'vocab': home + '/wordvector/news/wordvector.json', 'model': home + '/wordvector/news/wordvector.npy', 'version': 'v31', }, 'wikipedia': { 'vocab': home + '/wordvector/wikipedia/wordvector.json', 'model': home + '/wordvector/wikipedia/wordvector.npy', 'version': 'v31', }, 'socialmedia': { 'vocab': home + '/wordvector/socialmedia/wordvector.json', 'model': home + '/wordvector/socialmedia/wordvector.npy', 'version': 'v31', }, 'combine': { 'vocab': home + '/wordvector/combine/wordvector.json', 'model': home + '/wordvector/combine/wordvector.npy', 'version': 'v34', }, } S3_PATH_WORDVECTOR = { 'news': { 'vocab': 'bert-bahasa/word2vec-news-ms-256.json', 'model': 'bert-bahasa/word2vec-news-ms-256.npy', }, 'wikipedia': { 'vocab': 'bert-bahasa/word2vec-wiki-ms-256.json', 'model': 'bert-bahasa/word2vec-wiki-ms-256.npy', }, 'socialmedia': { 'vocab': 'bert-bahasa/word2vec-ms-socialmedia-256.json', 'model': 'bert-bahasa/word2vec-ms-socialmedia-256.npy', }, 'combine': { 'vocab': 
'bert-bahasa/word2vec-combined-256.json', 'model': 'bert-bahasa/word2vec-combined-256.npy', }, }
36.919467
121
0.563361
6,118
58,222
5.34423
0.025989
0.07585
0.070896
0.052851
0.859157
0.735228
0.534653
0.470486
0.43932
0.434059
0
0.037482
0.240236
58,222
1,576
122
36.942893
0.701662
0.001065
0
0.430921
0
0.001316
0.586815
0.438598
0
0
0
0
0
1
0
false
0
0.000658
0
0.000658
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
63887f373bdf05383a23eb7964c78efcfe8c7e19
6,499
py
Python
math.py
ericpearl/python-skeletons
95ad24b666e475998e5d1cc02ed53a2188036167
[ "Apache-2.0" ]
93
2015-01-20T06:36:59.000Z
2021-11-08T09:49:59.000Z
math.py
ericpearl/python-skeletons
95ad24b666e475998e5d1cc02ed53a2188036167
[ "Apache-2.0" ]
14
2015-05-23T23:19:41.000Z
2021-12-01T01:02:36.000Z
math.py
ericpearl/python-skeletons
95ad24b666e475998e5d1cc02ed53a2188036167
[ "Apache-2.0" ]
62
2015-01-30T13:13:39.000Z
2022-03-19T08:26:45.000Z
"""Skeleton for 'math' stdlib module.""" import sys import math def ceil(x): """Return the ceiling of x as a float, the smallest integer value greater than or equal to x. :type x: numbers.Real :rtype: float """ return 0.0 if sys.version_info >= (2, 6): def copysign(x, y): """Return x with the sign of y. On a platform that supports signed zeros, copysign(1.0, -0.0) returns -1.0. :type x: numbers.Real :type y: numbers.Real :rtype: float """ return 0.0 def fabs(x): """Return the absolute value of x. :type x: numbers.Real :rtype: float """ return 0.0 if sys.version_info >= (2, 6): def factorial(x): """Return x factorial. :type x: numbers.Integral :rtype: int """ return 0 def floor(x): """Return the floor of x as a float, the largest integer value less than or equal to x. :type x: numbers.Real :rtype: float """ return 0.0 def fmod(x, y): """Return fmod(x, y), as defined by the platform C library. :type x: numbers.Real :type y: numbers.Real :rtype: float """ return 0.0 def frexp(x): """Return the mantissa and exponent of x as the pair (m, e). :type x: numbers.Real :rtype: (float, int) """ return 0.0, 0 if sys.version_info >= (2, 6): def fsum(iterable): """Return an accurate floating point sum of values in the iterable. :type iterable: collections.Iterable[numbers.Real] :rtype: float """ return 0.0 def isinf(x): """Check if the float x is positive or negative infinity. :type x: numbers.Real :rtype: bool """ return False def isnan(x): """Check if the float x is a NaN (not a number). :type x: numbers.Real :rtype: bool """ return False def ldexp(x, i): """Return x * (2**i). :type x: numbers.Real :type i: numbers.Integral :rtype: float """ return 0.0 def modf(x): """Return the fractional and integer parts of x. :type x: numbers.Real :rtype: (float, float) """ return 0.0, 0.0 if sys.version_info >= (2, 6): def trunc(x): """Return the Real value x truncated to an Integral (usually a long integer). :type x: numbers.Real :rtype: int """ return 0 def exp(x): """Return e**x. 
:type x: numbers.Real :rtype: float """ return 0.0 if sys.version_info >= (2, 7): def expm1(x): """Return e**x - 1. :type x: numbers.Real :rtype: float """ return 0.0 def log(x, base=math.e): """With one argument, return the natural logarithm of x (to base e). With two arguments, return the logarithm of x to the given base, calculated as log(x)/log(base). :type x: numbers.Real :type base: numbers.Real :rtype: float """ return 0.0 if sys.version_info >= (2, 6): def log1p(x): """Return the natural logarithm of 1+x (base e). :type x: numbers.Real :rtype: float """ return 0.0 def log10(x): """Return the base-10 logarithm of x. :type x: numbers.Real :rtype: float """ return 0.0 def pow(x, y): """Return x raised to the power y. :type x: numbers.Real :type y: numbers.Real :rtype: float """ return 0.0 def sqrt(x): """Return the square root of x. :type x: numbers.Real :rtype: float """ return 0.0 def acos(x): """Return the arc cosine of x, in radians. :type x: numbers.Real :rtype: float """ return 0.0 def asin(x): """Return the arc sine of x, in radians. :type x: numbers.Real :rtype: float """ return 0.0 def atan(x): """Return the arc tangent of x, in radians. :type x: numbers.Real :rtype: float """ return 0.0 def atan2(y, x): """Return atan(y / x), in radians. :type y: numbers.Real :type x: numbers.Real :rtype: float """ return 0.0 def cos(x): """Return the cosine of x radians. :type x: numbers.Real :rtype: float """ return 0.0 def hypot(x, y): """Return the Euclidean norm, sqrt(x*x + y*y). :type x: numbers.Real :type y: numbers.Real :rtype: float """ return 0.0 def sin(x): """Return the sine of x radians. :type x: numbers.Real :rtype: float """ return 0.0 def tan(x): """Return the tangent of x radians. :type x: numbers.Real :rtype: float """ return 0.0 def degrees(x): """Converts angle x from radians to degrees. :type x: numbers.Real :rtype: float """ return 0.0 def radians(x): """Converts angle x from degrees to radians. 
:type x: numbers.Real :rtype: float """ return 0.0 if sys.version_info >= (2, 6): def acosh(x): """Return the inverse hyperbolic cosine of x. :type x: numbers.Real :rtype: float """ return 0.0 def asinh(x): """Return the inverse hyperbolic sine of x. :type x: numbers.Real :rtype: float """ return 0.0 def atanh(x): """Return the inverse hyperbolic tangent of x. :type x: numbers.Real :rtype: float """ return 0.0 def cosh(x): """Return the hyperbolic cosine of x. :type x: numbers.Real :rtype: float """ return 0.0 def sinh(x): """Return the hyperbolic sine of x. :type x: numbers.Real :rtype: float """ return 0.0 def tanh(x): """Return the hyperbolic tangent of x. :type x: numbers.Real :rtype: float """ return 0.0 if sys.version_info >= (2, 7): def erf(x): """Return the error function at x. :type x: numbers.Real :rtype: float """ return 0.0 def erfc(x): """Return the complementary error function at x. :type x: numbers.Real :rtype: float """ return 0.0 def gamma(x): """Return the Gamma function at x. :type x: numbers.Real :rtype: float """ return 0.0 def lgamma(x): """Return the natural logarithm of the absolute value of the Gamma function at x. :type x: numbers.Real :rtype: float """ return 0.0
17.013089
79
0.542083
933
6,499
3.767417
0.166131
0.140825
0.133144
0.172973
0.62845
0.569275
0.539403
0.520626
0.503272
0.481081
0
0.025191
0.334205
6,499
381
80
17.057743
0.78715
0.554393
0
0.511111
0
0
0
0
0
0
0
0
0
1
0.444444
false
0
0.022222
0
0.911111
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
638b5ce817706d65ba1787125a1fcd4bdbbbf074
332
py
Python
arcade/intro/a. 4. adjacent-elements-product/adjacent_elements_product.py
jeury301/code-fights
379dd541aed0f3918cf1659b635ec51368b0b349
[ "MIT" ]
null
null
null
arcade/intro/a. 4. adjacent-elements-product/adjacent_elements_product.py
jeury301/code-fights
379dd541aed0f3918cf1659b635ec51368b0b349
[ "MIT" ]
null
null
null
arcade/intro/a. 4. adjacent-elements-product/adjacent_elements_product.py
jeury301/code-fights
379dd541aed0f3918cf1659b635ec51368b0b349
[ "MIT" ]
null
null
null
def adjacentElementsProduct(inputArray):
    """Return the largest product of any two adjacent elements.

    Requires at least two elements; raises IndexError otherwise,
    exactly like the original indexing-based implementation.
    """
    best = inputArray[0] * inputArray[1]
    # Walk the remaining adjacent pairs: (a[1],a[2]), (a[2],a[3]), ...
    for left, right in zip(inputArray[1:], inputArray[2:]):
        product = left * right
        if product > best:
            best = product
    return best
25.538462
53
0.590361
38
332
5.078947
0.447368
0.310881
0.217617
0.279793
0.341969
0
0
0
0
0
0
0.013333
0.322289
332
12
54
27.666667
0.844444
0
0
0
0
0
0
0
0
0
0
0
0
1
0.1
false
0
0
0
0.2
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
63972bb13121477bd416502e913baf2c007b0dd9
2,259
py
Python
src/nti/app/pyramid_zope/tests/test_traversal.py
NextThought/nti.app.pyramid_zope
7d2f2b1fa0a04e4973358820c1c777b6ad7e8a3c
[ "Apache-2.0" ]
1
2020-05-06T04:19:59.000Z
2020-05-06T04:19:59.000Z
src/nti/app/pyramid_zope/tests/test_traversal.py
NextThought/nti.app.pyramid_zope
7d2f2b1fa0a04e4973358820c1c777b6ad7e8a3c
[ "Apache-2.0" ]
18
2019-12-31T20:39:02.000Z
2021-04-09T16:00:15.000Z
src/nti/app/pyramid_zope/tests/test_traversal.py
NextThought/nti.app.pyramid_zope
7d2f2b1fa0a04e4973358820c1c777b6ad7e8a3c
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Tests for traversal.py. """ from __future__ import print_function, unicode_literals import unittest from zope import interface from zope.traversing import interfaces as trv_interfaces from pyramid.testing import DummyRequest from hamcrest import assert_that from hamcrest import has_entries from hamcrest import is_ from . import ConfiguringLayer from .. import traversal class TestTraversal(unittest.TestCase): def test_unicode_traversal(self): # UnicodeEncodeError is specially handled # by the traversing machinery and doesn't raise # an error. (This is in zope.traversing.) # On Python 2, this was triggered in the real world by # attempting to access a non-ASCII attribute on an object # (which isn't allowed); this happened in the real world: # getattr(self, u'\u2019', None) # Raise unicode error # On Python 3, though, that's fine and is # allowed. The UnicodEncodeError constructor takes lots of # parameters, so rather than instantiate directly, we # trigger it indirectly by encoding --- as Python2 would do. @interface.implementer(trv_interfaces.ITraversable) class BrokenTraversable(object): raised = False def traverse(self, name, furtherPath): # pylint:disable=unused-argument BrokenTraversable.raised = True return u'\u2019'.encode('ascii') @interface.implementer(trv_interfaces.ITraversable) class Root(object): def traverse(self, name, furtherPath): # pylint:disable=unused-argument return BrokenTraversable() req = DummyRequest(path='/a/b/c') req.matchdict = {'traverse': ('a', 'b', 'c')} result = traversal.ZopeResourceTreeTraverser(Root())(req) self.assertTrue(BrokenTraversable.raised) assert_that(result, has_entries( context=is_(BrokenTraversable), root=is_(Root), )) class TestConfiguration(unittest.TestCase): layer = ConfiguringLayer def test_configures(self): """ Setting up the layer either works or fails. """ # TODO: More specific tests
32.271429
84
0.666224
256
2,259
5.804688
0.535156
0.026245
0.036339
0.018843
0.144011
0.144011
0.076716
0.076716
0.076716
0
0
0.007071
0.248783
2,259
69
85
32.73913
0.868592
0.333333
0
0.121212
0
0
0.019191
0
0
0
0
0.014493
0.090909
1
0.121212
false
0
0.30303
0.030303
0.636364
0.030303
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
1
0
0
3
63a9c06bef9e3c52d993175d16e6f40a7f3e4e4a
67
py
Python
tests/test_loop_3.py
raffaelfoidl/noworkflow
aa4ca189df24fec6c7abd32bcca6a097b21fdf31
[ "MIT" ]
108
2015-02-04T14:16:51.000Z
2022-03-06T13:52:45.000Z
tests/test_loop_3.py
raffaelfoidl/noworkflow
aa4ca189df24fec6c7abd32bcca6a097b21fdf31
[ "MIT" ]
92
2015-01-19T14:58:06.000Z
2021-04-19T17:28:50.000Z
tests/test_loop_3.py
raffaelfoidl/noworkflow
aa4ca189df24fec6c7abd32bcca6a097b21fdf31
[ "MIT" ]
31
2015-03-03T23:53:59.000Z
2021-11-11T04:23:44.000Z
# Call-chain fixture: x() delegates to y(), driven ten times below.
# NOTE(review): looks like a tracing/provenance test input where the
# exact statements matter — confirm before restructuring.
def y():
    pass


def x():
    # Each call of x forwards to y.
    y()


# Module-level driver: runs the x -> y chain ten times on import.
for i in range(10):
    x()
6.090909
19
0.402985
12
67
2.25
0.75
0
0
0
0
0
0
0
0
0
0
0.05
0.402985
67
10
20
6.7
0.625
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.166667
0
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
3
63abf18ee5311e15b0d4e04691d32fbb3e20e153
1,357
py
Python
epython/portfolio_management/portfolio/instrument.py
juggler99/epython-showcase
6f458ea67c6110f9ae311d2b0eac1dbbc29e5afa
[ "MIT" ]
null
null
null
epython/portfolio_management/portfolio/instrument.py
juggler99/epython-showcase
6f458ea67c6110f9ae311d2b0eac1dbbc29e5afa
[ "MIT" ]
null
null
null
epython/portfolio_management/portfolio/instrument.py
juggler99/epython-showcase
6f458ea67c6110f9ae311d2b0eac1dbbc29e5afa
[ "MIT" ]
null
null
null
import kydb
from portfolio_management.common.base_item import BaseItem


class InstrumentBase(BaseItem, kydb.DbObj):
    """
    Represents a financial instrument

    There are different types

    The idea is to provide a symbol and the system will pick the fill
    instrument definition from a data source

    Still needs instrument type enumeration
    """

    # NOTE(review): @kydb.stored presumably persists/caches the value in
    # the kydb document store; empty string is the unset default —
    # confirm against kydb's docs.
    @kydb.stored
    def instrument_type(self) -> str:
        return ''

    @kydb.stored
    def category(self) -> str:
        return ''

    @kydb.stored
    def description(self) -> str:
        return ''

    @kydb.stored
    def venue(self) -> str:
        return ''

    def __str__(self):
        # Renders as "<id>[<instrument_type>]: <category>".
        return '{0}[{1}]: {2}'.format(self.id(), self.instrument_type(), self.category())


class Instrument(InstrumentBase):
    """
    Equity instrument
    """

    @kydb.stored
    def name(self) -> str:
        return ''

    @kydb.stored
    def symbol(self) -> str:
        return ''

    def __str__(self):
        # Base rendering plus "<name>-<symbol>".
        return '{0} {1}-{2}'.format(super().__str__(), self.name(), self.symbol())


class InstrumentFx(InstrumentBase):
    """
    Fx Spot Instrument
    """

    @kydb.stored
    def ccy_pair(self) -> str:
        return ''

    def __str__(self):
        # Base rendering plus the currency pair.
        return '{0} {1}'.format(super().__str__(), self.ccy_pair())


def main():
    # Intentionally empty script entry point.
    pass


if __name__ == '__main__':
    main()
19.385714
89
0.599853
158
1,357
4.936709
0.392405
0.089744
0.116667
0.087179
0.270513
0.270513
0.137179
0.137179
0.137179
0.097436
0
0.00804
0.266765
1,357
69
90
19.666667
0.775879
0.179071
0
0.472222
0
0
0.037072
0
0
0
0
0
0
1
0.305556
false
0.027778
0.055556
0.277778
0.722222
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
63c130324b5b8ef6537c111f2454f399f367ff01
196
py
Python
gnpy/tools/__init__.py
odd22/gnpy
d28c67143eb36b0516721b19643971edb0129331
[ "BSD-3-Clause" ]
98
2018-06-05T22:05:18.000Z
2022-03-21T18:38:28.000Z
gnpy/tools/__init__.py
odd22/gnpy
d28c67143eb36b0516721b19643971edb0129331
[ "BSD-3-Clause" ]
244
2018-06-07T14:08:22.000Z
2022-03-31T17:52:30.000Z
gnpy/tools/__init__.py
odd22/gnpy
d28c67143eb36b0516721b19643971edb0129331
[ "BSD-3-Clause" ]
69
2018-06-29T15:42:23.000Z
2022-03-24T20:43:56.000Z
''' Processing of data via :py:mod:`.json_io`. Utilities for Excel conversion in :py:mod:`.convert` and :py:mod:`.service_sheet`. Example code in :py:mod:`.cli_examples` and :py:mod:`.plots`. '''
32.666667
82
0.693878
32
196
4.15625
0.6875
0.18797
0.105263
0
0
0
0
0
0
0
0
0
0.107143
196
5
83
39.2
0.76
0.954082
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
63cb8d95a386fc3fa1dc715142c15b5337882445
207
py
Python
tests/resources/noxfile_pythons.py
ktbarrett/nox
bf1c226de1c14c576537cf81f629571e5015784f
[ "Apache-2.0" ]
41
2016-03-03T11:18:49.000Z
2018-04-09T09:58:13.000Z
tests/resources/noxfile_pythons.py
ktbarrett/nox
bf1c226de1c14c576537cf81f629571e5015784f
[ "Apache-2.0" ]
79
2016-03-03T17:02:53.000Z
2018-04-23T16:25:51.000Z
tests/resources/noxfile_pythons.py
ktbarrett/nox
bf1c226de1c14c576537cf81f629571e5015784f
[ "Apache-2.0" ]
14
2016-03-03T18:22:20.000Z
2018-01-18T15:55:10.000Z
from __future__ import annotations

import nox


# Parametrized nox session: registered for Python 3.6 and run once per
# cheese value; the session argument itself is unused.
@nox.session(python=["3.6"])
@nox.parametrize("cheese", ["cheddar", "jack", "brie"])
def snack(unused_session, cheese):
    print(f"Noms, {cheese} so good!")
20.7
55
0.690821
28
207
4.928571
0.785714
0
0
0
0
0
0
0
0
0
0
0.010989
0.120773
207
9
56
23
0.747253
0
0
0
0
0
0.227053
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.5
0.166667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
63cbe6e48fa63ec40b4bcc7fc7905950dae6a072
208
py
Python
modules/imports.py
williamcanin/pykernel
8196c5f311d8e4100164dfb8e2c694b72c716627
[ "MIT" ]
null
null
null
modules/imports.py
williamcanin/pykernel
8196c5f311d8e4100164dfb8e2c694b72c716627
[ "MIT" ]
null
null
null
modules/imports.py
williamcanin/pykernel
8196c5f311d8e4100164dfb8e2c694b72c716627
[ "MIT" ]
null
null
null
import requests import sys import os from argparse import ArgumentParser from pathlib import Path import json from tqdm import tqdm # Modules for next version. # import re # import platform # import tarfile
16
35
0.807692
30
208
5.6
0.6
0
0
0
0
0
0
0
0
0
0
0
0.168269
208
12
36
17.333333
0.971098
0.317308
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
3
892835d24a7155d168e8b2ab1cd52bd0d3a476d5
258
py
Python
setup.py
teristam/openephys-fileIO
8089e7c4aff829c13a79656b8812a3d3e68eb1eb
[ "MIT" ]
1
2020-08-16T21:52:10.000Z
2020-08-16T21:52:10.000Z
setup.py
teristam/openephys-fileIO
8089e7c4aff829c13a79656b8812a3d3e68eb1eb
[ "MIT" ]
null
null
null
setup.py
teristam/openephys-fileIO
8089e7c4aff829c13a79656b8812a3d3e68eb1eb
[ "MIT" ]
null
null
null
"""Install script for the openephys_fileIO package."""
# `install_requires` is a setuptools feature; distutils.core.setup()
# warns about and ignores unknown options, so fall back only if
# setuptools is genuinely unavailable.
try:
    from setuptools import setup
except ImportError:  # pragma: no cover - setuptools is almost always present
    from distutils.core import setup

setup(
    name='openephys_fileIO',
    version='0.1.0',
    author='Teris Tam',
    # Fixed: the keyword is `packages` (plural). The original `package=`
    # was an unknown option, so the package was never actually installed.
    packages=['openephys_fileIO'],
    install_requires=[
        'numpy',
        'scipy',
        'pytest',
        'pytest-benchmark',
    ],
)
18.428571
33
0.569767
26
258
5.538462
0.807692
0.208333
0
0
0
0
0
0
0
0
0
0.016216
0.282946
258
14
34
18.428571
0.762162
0
0
0
0
0
0.301158
0
0
0
0
0
0
1
0
true
0
0.076923
0
0.076923
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
89283821324389e9a99f615f400f664e2d780ab2
433
py
Python
tagdata/elk_index.py
madelinemccombe/parse_threat_data
a02a82479fe35ee26e7c7b1087b90d9e64609fbc
[ "MIT" ]
null
null
null
tagdata/elk_index.py
madelinemccombe/parse_threat_data
a02a82479fe35ee26e7c7b1087b90d9e64609fbc
[ "MIT" ]
null
null
null
tagdata/elk_index.py
madelinemccombe/parse_threat_data
a02a82479fe35ee26e7c7b1087b90d9e64609fbc
[ "MIT" ]
1
2022-02-14T14:28:59.000Z
2022-02-14T14:28:59.000Z
#!/usr/bin/env python3
"""
Parse data files with json output for estack bulk load
"""


def elk_index(elk_index_name):
    """
    Index setup for ELK Stack bulk install

    :param elk_index_name: name placed in both the '_index' and
        '_type' fields of the bulk-action metadata line.
    :return: dict of the form
        {'index': {'_index': name, '_type': name}}
    """
    index_tag_inner = {
        '_index': elk_index_name,
        '_type': elk_index_name,
    }
    return {'index': index_tag_inner}


if __name__ == '__main__':
    # Fixed: the guard previously called elk_index() with no argument,
    # which raised TypeError whenever the file was run as a script.
    # Also repaired the shebang ('####!' -> '#!').
    print(elk_index('test-index'))
21.65
54
0.688222
63
433
4.222222
0.444444
0.210526
0.195489
0.191729
0.25188
0
0
0
0
0
0
0.002865
0.193995
433
19
55
22.789474
0.759312
0.267898
0
0
0
0
0.08
0
0
0
0
0
0
1
0.111111
false
0
0
0
0.222222
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
894d823b6b4aec8146f00c95504f05e1ddeb4a16
160
py
Python
src/flow/urls.py
OpenROAD-Cloud/flow-runner
1a9a22c8fa249d7fcd8d282d1c958033f64f25f3
[ "BSD-2-Clause" ]
null
null
null
src/flow/urls.py
OpenROAD-Cloud/flow-runner
1a9a22c8fa249d7fcd8d282d1c958033f64f25f3
[ "BSD-2-Clause" ]
2
2020-02-12T03:21:51.000Z
2020-02-12T03:24:13.000Z
src/flow/urls.py
OpenROAD-Cloud/flow-runner
1a9a22c8fa249d7fcd8d282d1c958033f64f25f3
[ "BSD-2-Clause" ]
null
null
null
from django.contrib import admin from django.urls import path, include urlpatterns = [ path('', include(('runner.urls', 'runner'), namespace='runner')), ]
22.857143
69
0.7
19
160
5.894737
0.578947
0.178571
0
0
0
0
0
0
0
0
0
0
0.1375
160
6
70
26.666667
0.811594
0
0
0
0
0
0.14375
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
894eadf0b2e1956cbdeee439766cf9428c165d8b
135
py
Python
src/greedy.py
joezeng/pdc2014
d38f22c7c27f5d67e7522a417f42a93aaad0499b
[ "MIT" ]
null
null
null
src/greedy.py
joezeng/pdc2014
d38f22c7c27f5d67e7522a417f42a93aaad0499b
[ "MIT" ]
null
null
null
src/greedy.py
joezeng/pdc2014
d38f22c7c27f5d67e7522a417f42a93aaad0499b
[ "MIT" ]
1
2018-11-20T18:50:25.000Z
2018-11-20T18:50:25.000Z
import sys line1 = sys.stdin.readline() n = [int(i) for i in line1.split()] for i in range(n[0]): if i+1 != n[1]: print i+1, print
15
35
0.607407
29
135
2.827586
0.551724
0.097561
0.146341
0
0
0
0
0
0
0
0
0.055556
0.2
135
8
36
16.875
0.703704
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.142857
null
null
0.285714
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
895591ad0bb73ab96aefe3f4d86ed1066b83c51c
181
py
Python
searchapp/models.py
kaiocesar/django-algolia
b2105ca79a6fa7d72f4935caee86facc479b096f
[ "MIT" ]
null
null
null
searchapp/models.py
kaiocesar/django-algolia
b2105ca79a6fa7d72f4935caee86facc479b096f
[ "MIT" ]
null
null
null
searchapp/models.py
kaiocesar/django-algolia
b2105ca79a6fa7d72f4935caee86facc479b096f
[ "MIT" ]
null
null
null
from django.db import models class Posts(models.Model): title = models.CharField(max_length=200) content = models.TextField() create_at = models.DateTimeField(auto_now_add=True)
30.166667
52
0.79558
26
181
5.384615
0.846154
0
0
0
0
0
0
0
0
0
0
0.018405
0.099448
181
6
52
30.166667
0.840491
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
895b08ad25f5d233542bda8e03dbd18c2e40d28e
7,346
py
Python
api/wecubek8s/wecubek8s/common/k8s.py
zgyzgyhero/wecube-plugins-kubernetes
99982a7a21dff2e54653ecf61055c54e08065087
[ "Apache-2.0" ]
6
2020-12-07T01:32:40.000Z
2021-07-16T05:52:41.000Z
api/wecubek8s/wecubek8s/common/k8s.py
zgyzgyhero/wecube-plugins-kubernetes
99982a7a21dff2e54653ecf61055c54e08065087
[ "Apache-2.0" ]
1
2021-06-11T13:18:24.000Z
2021-06-11T13:18:24.000Z
api/wecubek8s/wecubek8s/common/k8s.py
zgyzgyhero/wecube-plugins-kubernetes
99982a7a21dff2e54653ecf61055c54e08065087
[ "Apache-2.0" ]
2
2020-12-23T08:17:13.000Z
2021-06-11T13:10:16.000Z
# coding=utf-8 # coding=utf-8 from __future__ import absolute_import import logging import json import base64 import urllib3 from kubernetes import client from kubernetes.client import exceptions as k8s_exceptions from talos.core import config from talos.core.i18n import _ from wecubek8s.common import exceptions urllib3.disable_warnings() LOG = logging.getLogger(__name__) CONF = config.CONF class AuthToken: def __init__(self, api_server, token) -> None: self.api_server = api_server self.token = token def __call__(self, configuration) -> None: configuration.host = self.api_server configuration.verify_ssl = False configuration.api_key = {"authorization": "Bearer " + self.token} class AuthUserPass: def __init__(self, api_server, username, password) -> None: self.api_server = api_server self.username = username self.password = password def __call__(self, configuration) -> None: raise NotImplementedError() class Client: def __init__(self, auth) -> None: configuration = client.Configuration() auth(configuration) self.auth = auth api_client = client.ApiClient(configuration) self.core_client = client.CoreV1Api(api_client) self.app_client = client.AppsV1Api(api_client) def _action(self, client, func_name, *args, **kwargs): func = getattr(client, func_name) try: result = func(*args, **kwargs) return result except k8s_exceptions.ApiException as e: raise exceptions.K8sCallError(cluster=self.auth.api_server, msg=json.loads(e.body)['message']) def _action_detail(self, client, func_name, *args, **kwargs): func = getattr(client, func_name) try: result = func(*args, **kwargs) return result except k8s_exceptions.ApiException as e: if e.status == 404: return None raise exceptions.K8sCallError(cluster=self.auth.api_server, msg=json.loads(e.body)['message']) # Node def list_node(self, **kwargs): return self._action(self.core_client, 'list_node', **kwargs) # Namespace def create_namespace(self, body, **kwargs): return self._action(self.core_client, 'create_namespace', body, **kwargs) 
def update_namespace(self, name, body, **kwargs): return self._action(self.core_client, 'patch_namespace', name, body, **kwargs) def delete_namespace(self, name, **kwargs): return self._action(self.core_client, 'delete_namespace', name, **kwargs) def get_namespace(self, name, **kwargs): return self._action_detail(self.core_client, 'read_namespace', name, **kwargs) def list_namespace(self, **kwargs): return self._action(self.core_client, 'list_namespace', **kwargs) # Deployment def create_deployment(self, namespace, body, **kwargs): return self._action(self.app_client, 'create_namespaced_deployment', namespace, body, **kwargs) def update_deployment(self, name, namespace, body, **kwargs): return self._action(self.app_client, 'patch_namespaced_deployment', name, namespace, body, **kwargs) def delete_deployment(self, name, namespace, **kwargs): return self._action(self.app_client, 'delete_namespaced_deployment', name, namespace, **kwargs) def get_deployment(self, name, namespace, **kwargs): return self._action_detail(self.app_client, 'read_namespaced_deployment', name, namespace, **kwargs) def list_deployment(self, namespace, **kwargs): return self._action(self.app_client, 'list_namespaced_deployment', namespace, **kwargs) def list_all_deployment(self, **kwargs): return self._action(self.app_client, 'list_deployment_for_all_namespaces', **kwargs) # ReplcaSet def list_all_replica_set(self, **kwargs): return self._action(self.app_client, 'list_replica_set_for_all_namespaces', **kwargs) # Pod def list_all_pod(self, **kwargs): return self._action(self.core_client, 'list_pod_for_all_namespaces', **kwargs) # Service def create_service(self, namespace, body, **kwargs): return self._action(self.core_client, 'create_namespaced_service', namespace, body, **kwargs) def update_service(self, name, namespace, body, **kwargs): return self._action(self.core_client, 'patch_namespaced_service', name, namespace, body, **kwargs) def delete_service(self, name, namespace, **kwargs): return 
self._action(self.core_client, 'delete_namespaced_service', name, namespace, **kwargs) def get_service(self, name, namespace, **kwargs): return self._action_detail(self.core_client, 'read_namespaced_service', name, namespace, **kwargs) def list_service(self, namespace, **kwargs): return self._action(self.core_client, 'list_namespaced_service', namespace, **kwargs) def list_all_service(self, **kwargs): return self._action(self.core_client, 'list_service_for_all_namespaces', **kwargs) # Secret def create_secret(self, namespace, body, **kwargs): return self._action(self.core_client, 'create_namespaced_secret', namespace, body, **kwargs) def update_secret(self, name, namespace, body, **kwargs): return self._action(self.core_client, 'patch_namespaced_secret', name, namespace, body, **kwargs) def delete_secret(self, name, namespace, **kwargs): return self._action(self.core_client, 'delete_namespaced_secret', name, namespace, **kwargs) def get_secret(self, name, namespace, **kwargs): return self._action_detail(self.core_client, 'read_namespaced_secret', name, namespace, **kwargs) def list_secret(self, namespace, **kwargs): return self._action(self.core_client, 'list_namespaced_secret', namespace, **kwargs) def ensure_registry_secret(self, name, namespace, server, username, password, email=None, **kwargs): auth_data = { 'auths': { server: { "username": username, "password": password, "auth": base64.b64encode(("%s:%s" % (username, password)).encode('utf-8')).decode() } } } if email is not None: auth_data[server]['email'] = email body = { 'apiVersion': 'v1', 'kind': 'Secret', 'metadata': { 'name': name, 'namespace': namespace }, 'type': 'kubernetes.io/dockerconfigjson', 'data': { '.dockerconfigjson': base64.b64encode((json.dumps(auth_data).encode('utf-8'))).decode() } } has_secret = self.get_secret(name, namespace) if has_secret is None: self.create_secret(namespace, body, **kwargs) else: self.update_secret(name, namespace, body) return True def ensure_namespace(self, name, 
**kwargs): body = {'apiVersion': 'v1', 'kind': 'Namespace', 'metadata': {'name': name}, 'labels': {}} has_namespace = self.get_namespace(name) if has_namespace is None: self.create_namespace(body, **kwargs) else: self.update_namespace(name, body) return True
38.663158
108
0.659679
841
7,346
5.512485
0.142687
0.069888
0.086281
0.118637
0.55371
0.493529
0.415876
0.394737
0.353322
0.234254
0
0.005427
0.222434
7,346
190
109
38.663158
0.806197
0.01089
0
0.142857
0
0
0.110376
0.07262
0
0
0
0
0
1
0.242857
false
0.042857
0.071429
0.178571
0.55
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
897423d2dba4be5b162d342aa665872d0202128f
100
py
Python
bocadillo/sessions.py
sfermigier/bocadillo
c96842edf3657af2627cce0d8af30776916c1e30
[ "MIT" ]
null
null
null
bocadillo/sessions.py
sfermigier/bocadillo
c96842edf3657af2627cce0d8af30776916c1e30
[ "MIT" ]
null
null
null
bocadillo/sessions.py
sfermigier/bocadillo
c96842edf3657af2627cce0d8af30776916c1e30
[ "MIT" ]
null
null
null
class MissingSecretKey(Exception): """Raised when no cookie siggnature secret key was found."""
33.333333
64
0.75
12
100
6.25
1
0
0
0
0
0
0
0
0
0
0
0
0.15
100
2
65
50
0.882353
0.54
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
8976fdd7b31eb78436423bf42c169d955b5a361b
560
py
Python
Python/mex1.py
chernyshov-dev/ideal-octo-waffle
c50f29795352766752dbbbcd46693ff54f23369b
[ "WTFPL" ]
3
2021-08-29T15:22:08.000Z
2021-08-29T17:12:01.000Z
Python/mex1.py
chernyshov-dev/ideal-octo-waffle
c50f29795352766752dbbbcd46693ff54f23369b
[ "WTFPL" ]
11
2021-09-07T19:24:15.000Z
2022-01-13T19:51:25.000Z
Python/mex1.py
chernyshov-dev/university-practice-heap
c50f29795352766752dbbbcd46693ff54f23369b
[ "WTFPL" ]
null
null
null
import math def foo(a,b,d): if a == 0 or b == 0 or d == 0: return 'Заданный треугольник не существует' s = 1/2 * a * b * math.sin(math.radians(d)) return round(s,3) def geron(a, b, c): if a == 0 or b == 0 or c == 0: return 'Заданный треугольник не существует' elif a + b < c or a + c < b or b + c < a: return 'Заданный треугольник не существует' else: p = (a + b + c)/2 s = math.sqrt(p*(p - a) * (p - b) * (p - c)) return round(s,3)
21.538462
53
0.458929
91
560
2.824176
0.307692
0.038911
0.291829
0.315175
0.51751
0.373541
0.077821
0
0
0
0
0.032934
0.403571
560
25
54
22.4
0.736527
0
0
0.333333
0
0
0.19209
0
0
0
0
0
0
1
0.133333
false
0
0.066667
0
0.533333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
89823972b35e357fd974eb304c5d1f82b7d743c7
134
py
Python
dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/__init__.py
srcarter3/python-awips
d981062662968cf3fb105e8e23d955950ae2497e
[ "BSD-3-Clause" ]
33
2016-03-17T01:21:18.000Z
2022-02-08T10:41:06.000Z
dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/__init__.py
srcarter3/python-awips
d981062662968cf3fb105e8e23d955950ae2497e
[ "BSD-3-Clause" ]
15
2016-04-19T16:34:08.000Z
2020-09-09T19:57:54.000Z
dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/__init__.py
Unidata/python-awips
8459aa756816e5a45d2e5bea534d23d5b1dd1690
[ "BSD-3-Clause" ]
20
2016-03-12T01:46:58.000Z
2022-02-08T06:53:22.000Z
__all__ = [ 'DataURINotificationMessage' ] from .DataURINotificationMessage import DataURINotificationMessage
19.142857
66
0.708955
6
134
15.166667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.246269
134
6
67
22.333333
0.90099
0
0
0
0
0
0.195489
0.195489
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
899277ef9ccb15e267d4d35968db55615ee39955
151
py
Python
src/dbxdeploy/notebook/loader.py
DataSentics/dbx-deploy
c019d80a2b947b4f180071e97c3981daa3a2214a
[ "MIT" ]
null
null
null
src/dbxdeploy/notebook/loader.py
DataSentics/dbx-deploy
c019d80a2b947b4f180071e97c3981daa3a2214a
[ "MIT" ]
null
null
null
src/dbxdeploy/notebook/loader.py
DataSentics/dbx-deploy
c019d80a2b947b4f180071e97c3981daa3a2214a
[ "MIT" ]
null
null
null
from pathlib import Path def loadNotebook(notebookPath: Path) -> str: with notebookPath.open('r', encoding='utf-8') as f: return f.read()
25.166667
55
0.675497
21
151
4.857143
0.857143
0
0
0
0
0
0
0
0
0
0
0.008197
0.192053
151
5
56
30.2
0.827869
0
0
0
0
0
0.039735
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
89bce2483aa68780cb4056a30f34fb6ade97a146
349
py
Python
ee/models/__init__.py
dorucioclea/posthog
a7e792c3fc5c1abc70d8167e1ead12d4ea24f17a
[ "MIT" ]
null
null
null
ee/models/__init__.py
dorucioclea/posthog
a7e792c3fc5c1abc70d8167e1ead12d4ea24f17a
[ "MIT" ]
null
null
null
ee/models/__init__.py
dorucioclea/posthog
a7e792c3fc5c1abc70d8167e1ead12d4ea24f17a
[ "MIT" ]
null
null
null
from .event_definition import EnterpriseEventDefinition from .explicit_team_membership import ExplicitTeamMembership from .hook import Hook from .license import License from .property_definition import EnterprisePropertyDefinition __all__ = ["EnterpriseEventDefinition", "ExplicitTeamMembership", "Hook", "License", "EnterprisePropertyDefinition"]
43.625
116
0.856734
30
349
9.7
0.466667
0.109966
0
0
0
0
0
0
0
0
0
0
0.080229
349
7
117
49.857143
0.906542
0
0
0
0
0
0.246418
0.2149
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
983abdd7991953edaa224ffc46bf5407d0ac0786
125
py
Python
testing-api/app/routes/router.py
Klavionik/BlackSheep-Examples
018bce8ac6023e47cbd4cd49784e260626c886bd
[ "MIT" ]
2
2021-06-21T09:34:50.000Z
2021-09-06T10:16:12.000Z
testing-api/app/routes/router.py
Klavionik/BlackSheep-Examples
018bce8ac6023e47cbd4cd49784e260626c886bd
[ "MIT" ]
1
2022-02-05T15:29:10.000Z
2022-02-05T15:29:10.000Z
testing-api/app/routes/router.py
Klavionik/BlackSheep-Examples
018bce8ac6023e47cbd4cd49784e260626c886bd
[ "MIT" ]
1
2022-02-01T21:38:43.000Z
2022-02-01T21:38:43.000Z
from blacksheep.server.routing import Router router = Router() get = router.get post = router.post delete = router.delete
13.888889
44
0.76
17
125
5.588235
0.529412
0.252632
0
0
0
0
0
0
0
0
0
0
0.152
125
8
45
15.625
0.896226
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
983b8d8c40f70ae03528fa94bc3742ca66010260
1,748
py
Python
packages/pyright-internal/src/tests/samples/operators1.py
Microsoft/pyright
adf7c3e92e4540d930e3652de3c1c335855af595
[ "MIT" ]
3,934
2019-03-22T09:26:41.000Z
2019-05-06T21:03:08.000Z
packages/pyright-internal/src/tests/samples/operators1.py
Microsoft/pyright
adf7c3e92e4540d930e3652de3c1c335855af595
[ "MIT" ]
107
2019-03-24T04:09:37.000Z
2019-05-06T17:00:04.000Z
packages/pyright-internal/src/tests/samples/operators1.py
Microsoft/pyright
adf7c3e92e4540d930e3652de3c1c335855af595
[ "MIT" ]
119
2019-03-23T10:48:04.000Z
2019-05-06T08:57:56.000Z
# This sample tests the type checker's ability to check # custom operator overrides. from typing import Union class A(object): def __eq__(self, Foo): return "equal" class B(object): def __ne__(self, Bar): return self def __lt__(self, Bar): return "string" def __gt__(self, Bar): return "string" def __ge__(self, Bar): return "string" def __le__(self, Bar): return "string" def needs_a_string(val: str): pass def needs_a_string_or_bool(val: Union[bool, str]): pass def test(): a = A() needs_a_string(a == a) # This should generate an error because there # is no __ne__ operator defined, so a bool # value will result. needs_a_string(a != a) if True: a = B() # At this point, a should be of type Union[Foo, Bar], # so the == operator should return either a str or # a bool. needs_a_string_or_bool(a == a) # This should generate an error. needs_a_string(a == a) # This should generate an error. needs_a_string_or_bool(a != a) b = B() needs_a_string(b < b) needs_a_string(b > b) needs_a_string(b <= b) needs_a_string(b >= b) class C: def __getattr__(self, name: str, /): if name == "__add__": return lambda _: 0 a = C() a.__add__ # This should generate an error because __getattr__ is not used # when looking up operator overload methods. b = a + 0 class D: def __init__(self): self.__add__ = lambda x: x d = D() # This should generate an error because __add__ is not a class variable. _ = d + d class E: __slots__ = ("__add__",) def __init__(self): self.__add__ = lambda x: x e = E() _ = e + e
17.137255
72
0.608124
264
1,748
3.636364
0.30303
0.06875
0.1375
0.104167
0.4625
0.3375
0.263542
0.236458
0.182292
0.182292
0
0.001613
0.290618
1,748
101
73
17.306931
0.772581
0.303776
0
0.235294
0
0
0.035744
0
0
0
0
0
0
1
0.235294
false
0.039216
0.019608
0.117647
0.509804
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
98441054a2a227acc600614ec92ab528cd92c1ed
1,244
py
Python
CGAN/utils.py
aryanpanpalia/GANs
5fb25ec3757668c9ee2c5fa685354632fd5da007
[ "MIT" ]
1
2021-06-24T03:08:08.000Z
2021-06-24T03:08:08.000Z
CGAN/utils.py
aryanpanpalia/GANs
5fb25ec3757668c9ee2c5fa685354632fd5da007
[ "MIT" ]
null
null
null
CGAN/utils.py
aryanpanpalia/GANs
5fb25ec3757668c9ee2c5fa685354632fd5da007
[ "MIT" ]
null
null
null
import matplotlib.pyplot as plt import torch from torchvision.utils import make_grid import torch.nn.functional as F from torch import nn torch.manual_seed(0) def get_noise(n_samples, input_dim, device='cpu'): return torch.randn(n_samples, input_dim, device=device) def show_tensor_images(image_tensor, num_images=25, nrow=5): image_tensor = (image_tensor + 1) / 2 image_unflat = image_tensor.detach().cpu() image_grid = make_grid(image_unflat[:num_images], nrow=nrow) plt.imshow(image_grid.permute(1, 2, 0).squeeze()) plt.show() def get_one_hot_labels(labels, n_classes): return F.one_hot(labels, n_classes) def combine_vectors(x, y): return torch.cat((x, y), 1).float() def get_input_dimensions(z_dim, image_shape, n_classes): generator_input_dim = z_dim + n_classes discriminator_im_chan = n_classes + image_shape[0] return generator_input_dim, discriminator_im_chan def weights_init(m): if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d): torch.nn.init.normal_(m.weight, 0.0, 0.02) if isinstance(m, nn.BatchNorm2d): torch.nn.init.normal_(m.weight, 0.0, 0.02) torch.nn.init.constant_(m.bias, 0)
28.930233
70
0.702572
194
1,244
4.257732
0.381443
0.048426
0.047216
0.038741
0.123487
0.070218
0.070218
0.070218
0.070218
0.070218
0
0.024655
0.184887
1,244
42
71
29.619048
0.789941
0
0
0.071429
0
0
0.002496
0
0
0
0
0
0
1
0.214286
false
0
0.178571
0.107143
0.535714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
9877fde6709f051fb00d20d2e7ad73a2ebe1a8f9
3,117
py
Python
pyOCD/flash/flash_stm32l486.py
mesheven/mesh-pyocd-old
99ecfeeac95820dacab52a1280b0fba6d4f51fc9
[ "Apache-2.0" ]
null
null
null
pyOCD/flash/flash_stm32l486.py
mesheven/mesh-pyocd-old
99ecfeeac95820dacab52a1280b0fba6d4f51fc9
[ "Apache-2.0" ]
null
null
null
pyOCD/flash/flash_stm32l486.py
mesheven/mesh-pyocd-old
99ecfeeac95820dacab52a1280b0fba6d4f51fc9
[ "Apache-2.0" ]
null
null
null
""" mbed CMSIS-DAP debugger Copyright (c) 2006-2013 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from flash import Flash flash_algo = { 'load_address' : 0x20000000, 'instructions' : [ 0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2, 0x6000f1a0, 0x47700ac0, 0x49514852, 0x49526081, 0x20006081, 0x494f4770, 0x30f9f24c, 0x47706108, 0xf24c494c, 0x610830f9, 0x47702000, 0x47702000, 0xb5004a48, 0x28006950, 0xf7ffda01, 0x6910ffe5, 0xd4fc03c0, 0x30f9f24c, 0x69506110, 0x0104f248, 0x61504308, 0xf4406950, 0x61503080, 0x03c06910, 0x6950d4fc, 0x61504388, 0xbd002000, 0x4a39b510, 0x23004604, 0x28006950, 0xf7ffda01, 0x6911ffc5, 0xd4fc03c8, 0x30f9f24c, 0xf1a46110, 0x0ae06400, 0xd90228ff, 0x6300f44f, 0x6951b2c0, 0x74f8f640, 0x240243a1, 0x00c0eb04, 0x43084318, 0x69506150, 0x3080f440, 0x69106150, 0xd4fc03c0, 0xf0206950, 0x61500002, 0xbd102000, 0x460eb5f0, 0x07492400, 0x2701d001, 0x2700e000, 0xf0204d1e, 0x69680307, 0xda012800, 0xff90f7ff, 0x03c06928, 0xf24cd4fc, 0x612830f9, 0x01d6eb07, 0xf040e021, 0x61680001, 0xc301ca01, 0xc301ca01, 0x03c06928, 0x6928d4fc, 0xd00107c0, 0x61282001, 0x0c08f852, 0x6c08f853, 0xd10542b0, 0x0c04f852, 0x6c04f853, 0xd00542b0, 0xf0206968, 0x61680001, 0xbdf02001, 0x69681c64, 0xd8da42a1, 0x0001f020, 0x20006168, 0x0000bdf0, 0x45670123, 0x40022000, 0xcdef89ab, 0x00000000, ], 'pc_init' : 0x20000041, 'pc_eraseAll' : 0x20000051, 'pc_erase_sector' : 0x2000008D, 'pc_program_page' : 0x200000E9, 'static_base' : 0x20000200, 'begin_data' : 0x20001000, 
# Analyzer uses a max of 2 KB data (512 pages * 4 bytes / page) 'page_buffers' : [0x20001000, 0x20001800], # Enable double buffering 'begin_stack' : 0x20002800, 'min_program_length' : 8, 'analyzer_supported' : True, 'analyzer_address' : 0x20003000 # Analyzer 0x20003000..0x20003600 }; class Flash_stm32l486(Flash): def __init__(self, target): super(Flash_stm32l486, self).__init__(target, flash_algo)
57.722222
130
0.616939
274
3,117
6.919708
0.762774
0.031646
0.013713
0.016878
0
0
0
0
0
0
0
0.394195
0.314726
3,117
54
131
57.722222
0.493446
0.225537
0
0
0
0
0.070175
0
0
0
0.446951
0
0
1
0.032258
false
0
0.032258
0
0.096774
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
3
7f3f6cbfffcc0317e47e0a58461d84aeb462d4b2
119
py
Python
homeassistant/components/filter/__init__.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
30,023
2016-04-13T10:17:53.000Z
2020-03-02T12:56:31.000Z
homeassistant/components/filter/__init__.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
31,101
2020-03-02T13:00:16.000Z
2022-03-31T23:57:36.000Z
homeassistant/components/filter/__init__.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
11,956
2016-04-13T18:42:31.000Z
2020-03-02T09:32:12.000Z
"""The filter component.""" from homeassistant.const import Platform DOMAIN = "filter" PLATFORMS = [Platform.SENSOR]
17
40
0.747899
13
119
6.846154
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.12605
119
6
41
19.833333
0.855769
0.176471
0
0
0
0
0.065217
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
7f4882c2d2287cde42e6aa73d9c6ab05ec059d72
45,170
py
Python
shenaniganfs/generated/rfc1813.py
JordanMilne/ShenaniganFS
fbe346881c89ed06ec5a8c3832d530d3704e0ed5
[ "MIT" ]
1
2021-03-03T14:17:39.000Z
2021-03-03T14:17:39.000Z
shenaniganfs/generated/rfc1813.py
JordanMilne/ShenaniganFS
fbe346881c89ed06ec5a8c3832d530d3704e0ed5
[ "MIT" ]
null
null
null
shenaniganfs/generated/rfc1813.py
JordanMilne/ShenaniganFS
fbe346881c89ed06ec5a8c3832d530d3704e0ed5
[ "MIT" ]
null
null
null
# Auto-generated from IDL file import abc import dataclasses import typing from dataclasses import dataclass from shenaniganfs import rpchelp TRUE = True FALSE = False NFS3_OK = 0 NFS3ERR_PERM = 1 NFS3ERR_NOENT = 2 NFS3ERR_IO = 5 NFS3ERR_NXIO = 6 NFS3ERR_ACCES = 13 NFS3ERR_EXIST = 17 NFS3ERR_XDEV = 18 NFS3ERR_NODEV = 19 NFS3ERR_NOTDIR = 20 NFS3ERR_ISDIR = 21 NFS3ERR_INVAL = 22 NFS3ERR_FBIG = 27 NFS3ERR_NOSPC = 28 NFS3ERR_ROFS = 30 NFS3ERR_MLINK = 31 NFS3ERR_NAMETOOLONG = 63 NFS3ERR_NOTEMPTY = 66 NFS3ERR_DQUOT = 69 NFS3ERR_STALE = 70 NFS3ERR_REMOTE = 71 NFS3ERR_BADHANDLE = 10001 NFS3ERR_NOT_SYNC = 10002 NFS3ERR_BAD_COOKIE = 10003 NFS3ERR_NOTSUPP = 10004 NFS3ERR_TOOSMALL = 10005 NFS3ERR_SERVERFAULT = 10006 NFS3ERR_BADTYPE = 10007 NFS3ERR_JUKEBOX = 10008 NF3REG = 1 NF3DIR = 2 NF3BLK = 3 NF3CHR = 4 NF3LNK = 5 NF3SOCK = 6 NF3FIFO = 7 DONT_CHANGE = 0 SET_TO_SERVER_TIME = 1 SET_TO_CLIENT_TIME = 2 UNSTABLE = 0 DATA_SYNC = 1 FILE_SYNC = 2 UNCHECKED = 0 GUARDED = 1 EXCLUSIVE = 2 MNT3_OK = 0 MNT3ERR_PERM = 1 MNT3ERR_NOENT = 2 MNT3ERR_IO = 5 MNT3ERR_ACCES = 13 MNT3ERR_NOTDIR = 20 MNT3ERR_INVAL = 22 MNT3ERR_NAMETOOLONG = 63 MNT3ERR_NOTSUPP = 10004 MNT3ERR_SERVERFAULT = 10006 NFS3_FHSIZE = 64 NFS3_COOKIEVERFSIZE = 8 NFS3_CREATEVERFSIZE = 8 NFS3_WRITEVERFSIZE = 8 Uint64 = rpchelp.r_uhyper Int64 = rpchelp.r_hyper Uint32 = rpchelp.r_uint Int32 = rpchelp.r_int Filename3 = rpchelp.Opaque(rpchelp.LengthType.VAR, None) NFSPath3 = rpchelp.Opaque(rpchelp.LengthType.VAR, None) Fileid3 = Uint64 Cookie3 = Uint64 Cookieverf3 = rpchelp.Opaque(rpchelp.LengthType.FIXED, NFS3_COOKIEVERFSIZE) Createverf3 = rpchelp.Opaque(rpchelp.LengthType.FIXED, NFS3_CREATEVERFSIZE) Writeverf3 = rpchelp.Opaque(rpchelp.LengthType.FIXED, NFS3_WRITEVERFSIZE) Uid3 = Uint32 Gid3 = Uint32 Size3 = Uint64 Offset3 = Uint64 Mode3 = Uint32 Count3 = Uint32 class NFSStat3(rpchelp.Enum): # nfsstat3 NFS3_OK = 0 NFS3ERR_PERM = 1 NFS3ERR_NOENT = 2 NFS3ERR_IO = 5 NFS3ERR_NXIO = 6 NFS3ERR_ACCES = 13 NFS3ERR_EXIST = 17 NFS3ERR_XDEV 
= 18 NFS3ERR_NODEV = 19 NFS3ERR_NOTDIR = 20 NFS3ERR_ISDIR = 21 NFS3ERR_INVAL = 22 NFS3ERR_FBIG = 27 NFS3ERR_NOSPC = 28 NFS3ERR_ROFS = 30 NFS3ERR_MLINK = 31 NFS3ERR_NAMETOOLONG = 63 NFS3ERR_NOTEMPTY = 66 NFS3ERR_DQUOT = 69 NFS3ERR_STALE = 70 NFS3ERR_REMOTE = 71 NFS3ERR_BADHANDLE = 10001 NFS3ERR_NOT_SYNC = 10002 NFS3ERR_BAD_COOKIE = 10003 NFS3ERR_NOTSUPP = 10004 NFS3ERR_TOOSMALL = 10005 NFS3ERR_SERVERFAULT = 10006 NFS3ERR_BADTYPE = 10007 NFS3ERR_JUKEBOX = 10008 class Ftype3(rpchelp.Enum): # ftype3 NF3REG = 1 NF3DIR = 2 NF3BLK = 3 NF3CHR = 4 NF3LNK = 5 NF3SOCK = 6 NF3FIFO = 7 @dataclass class SpecData3(rpchelp.Struct): # specdata3 specdata1: int = rpchelp.rpc_field(Uint32) specdata2: int = rpchelp.rpc_field(Uint32) NFSFh3 = rpchelp.Opaque(rpchelp.LengthType.VAR, NFS3_FHSIZE) @dataclass class NFSTime3(rpchelp.Struct): # nfstime3 seconds: int = rpchelp.rpc_field(Uint32) nseconds: int = rpchelp.rpc_field(Uint32) @dataclass class FAttr3(rpchelp.Struct): # fattr3 type: typing.Union[Ftype3, int] = rpchelp.rpc_field(Ftype3) mode: int = rpchelp.rpc_field(Mode3) nlink: int = rpchelp.rpc_field(Uint32) uid: int = rpchelp.rpc_field(Uid3) gid: int = rpchelp.rpc_field(Gid3) size: int = rpchelp.rpc_field(Size3) used: int = rpchelp.rpc_field(Size3) rdev: SpecData3 = rpchelp.rpc_field(SpecData3) fsid: int = rpchelp.rpc_field(Uint64) fileid: int = rpchelp.rpc_field(Fileid3) atime: NFSTime3 = rpchelp.rpc_field(NFSTime3) mtime: NFSTime3 = rpchelp.rpc_field(NFSTime3) ctime: NFSTime3 = rpchelp.rpc_field(NFSTime3) @dataclass class WccAttr(rpchelp.Struct): # wcc_attr size: int = rpchelp.rpc_field(Size3) mtime: NFSTime3 = rpchelp.rpc_field(NFSTime3) ctime: NFSTime3 = rpchelp.rpc_field(NFSTime3) PostOpAttr = rpchelp.OptData(FAttr3) PreOpAttr = rpchelp.OptData(WccAttr) @dataclass class WccData(rpchelp.Struct): # wcc_data before: typing.Optional[WccAttr] = rpchelp.rpc_field(PreOpAttr) after: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) PostOpFh3 = rpchelp.OptData(NFSFh3) class 
TimeHow(rpchelp.Enum): # time_how DONT_CHANGE = 0 SET_TO_SERVER_TIME = 1 SET_TO_CLIENT_TIME = 2 SetMode3 = rpchelp.OptData(Mode3) SetUid3 = rpchelp.OptData(Uid3) SetGid3 = rpchelp.OptData(Gid3) SetSize3 = rpchelp.OptData(Size3) @dataclass class SetTime(rpchelp.Union): # set_time SWITCH_OPTIONS = {None: None, SET_TO_CLIENT_TIME: 'time_val'} set_it: typing.Union[TimeHow, int] = rpchelp.rpc_field(TimeHow) time_val: typing.Optional[NFSTime3] = rpchelp.rpc_field(NFSTime3, default=None) @dataclass class SAttr3(rpchelp.Struct): # sattr3 mode: typing.Optional[int] = rpchelp.rpc_field(SetMode3) uid: typing.Optional[int] = rpchelp.rpc_field(SetUid3) gid: typing.Optional[int] = rpchelp.rpc_field(SetGid3) size: typing.Optional[int] = rpchelp.rpc_field(SetSize3) atime: SetTime = rpchelp.rpc_field(SetTime) mtime: SetTime = rpchelp.rpc_field(SetTime) @dataclass class DiropArgs3(rpchelp.Struct): # diropargs3 dir_handle: bytes = rpchelp.rpc_field(NFSFh3) name: bytes = rpchelp.rpc_field(Filename3) @dataclass class GETATTR3Args(rpchelp.Struct): # GETATTR3args obj_handle: bytes = rpchelp.rpc_field(NFSFh3) @dataclass class GETATTR3ResOK(rpchelp.Struct): # GETATTR3resok obj_attributes: FAttr3 = rpchelp.rpc_field(FAttr3) @dataclass class GETATTR3Res(rpchelp.Union): # GETATTR3res SWITCH_OPTIONS = {None: None, NFS3_OK: 'resok'} status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3) resok: typing.Optional[GETATTR3ResOK] = rpchelp.rpc_field(GETATTR3ResOK, default=None) @dataclass class Sattrguard3(rpchelp.Union): # sattrguard3 SWITCH_OPTIONS = {FALSE: None, TRUE: 'obj_ctime'} check: bool = rpchelp.rpc_field(rpchelp.r_bool) obj_ctime: typing.Optional[NFSTime3] = rpchelp.rpc_field(NFSTime3, default=None) @dataclass class SETATTR3Args(rpchelp.Struct): # SETATTR3args obj_handle: bytes = rpchelp.rpc_field(NFSFh3) new_attributes: SAttr3 = rpchelp.rpc_field(SAttr3) guard: Sattrguard3 = rpchelp.rpc_field(Sattrguard3) @dataclass class SETATTR3ResOK(rpchelp.Struct): # SETATTR3resok obj_wcc: 
WccData = rpchelp.rpc_field(WccData) @dataclass class SETATTR3ResFail(rpchelp.Struct): # SETATTR3resfail obj_wcc: WccData = rpchelp.rpc_field(WccData) @dataclass class SETATTR3Res(rpchelp.Union): # SETATTR3res SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'} status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3) resfail: typing.Optional[SETATTR3ResFail] = rpchelp.rpc_field(SETATTR3ResFail, default=None) resok: typing.Optional[SETATTR3ResOK] = rpchelp.rpc_field(SETATTR3ResOK, default=None) @dataclass class LOOKUP3Args(rpchelp.Struct): # LOOKUP3args what: DiropArgs3 = rpchelp.rpc_field(DiropArgs3) @dataclass class LOOKUP3ResOK(rpchelp.Struct): # LOOKUP3resok obj_handle: bytes = rpchelp.rpc_field(NFSFh3) obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) dir_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) @dataclass class LOOKUP3ResFail(rpchelp.Struct): # LOOKUP3resfail dir_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) @dataclass class LOOKUP3Res(rpchelp.Union): # LOOKUP3res SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'} status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3) resfail: typing.Optional[LOOKUP3ResFail] = rpchelp.rpc_field(LOOKUP3ResFail, default=None) resok: typing.Optional[LOOKUP3ResOK] = rpchelp.rpc_field(LOOKUP3ResOK, default=None) ACCESS3_READ = 0x0001 ACCESS3_LOOKUP = 0x0002 ACCESS3_MODIFY = 0x0004 ACCESS3_EXTEND = 0x0008 ACCESS3_DELETE = 0x0010 ACCESS3_EXECUTE = 0x0020 @dataclass class ACCESS3Args(rpchelp.Struct): # ACCESS3args obj_handle: bytes = rpchelp.rpc_field(NFSFh3) access: int = rpchelp.rpc_field(Uint32) @dataclass class ACCESS3ResOK(rpchelp.Struct): # ACCESS3resok obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) access: int = rpchelp.rpc_field(Uint32) @dataclass class ACCESS3ResFail(rpchelp.Struct): # ACCESS3resfail obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) @dataclass class 
ACCESS3Res(rpchelp.Union): # ACCESS3res SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'} status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3) resfail: typing.Optional[ACCESS3ResFail] = rpchelp.rpc_field(ACCESS3ResFail, default=None) resok: typing.Optional[ACCESS3ResOK] = rpchelp.rpc_field(ACCESS3ResOK, default=None) @dataclass class READLINK3Args(rpchelp.Struct): # READLINK3args symlink_handle: bytes = rpchelp.rpc_field(NFSFh3) @dataclass class READLINK3ResOK(rpchelp.Struct): # READLINK3resok symlink_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) data: bytes = rpchelp.rpc_field(NFSPath3) @dataclass class READLINK3ResFail(rpchelp.Struct): # READLINK3resfail symlink_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) @dataclass class READLINK3Res(rpchelp.Union): # READLINK3res SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'} status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3) resfail: typing.Optional[READLINK3ResFail] = rpchelp.rpc_field(READLINK3ResFail, default=None) resok: typing.Optional[READLINK3ResOK] = rpchelp.rpc_field(READLINK3ResOK, default=None) @dataclass class READ3Args(rpchelp.Struct): # READ3args file_handle: bytes = rpchelp.rpc_field(NFSFh3) offset: int = rpchelp.rpc_field(Offset3) count: int = rpchelp.rpc_field(Count3) @dataclass class READ3ResOK(rpchelp.Struct): # READ3resok file_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) count: int = rpchelp.rpc_field(Count3) eof: bool = rpchelp.rpc_field(rpchelp.r_bool) data: bytes = rpchelp.rpc_field(rpchelp.Opaque(rpchelp.LengthType.VAR, None)) @dataclass class READ3ResFail(rpchelp.Struct): # READ3resfail file_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr) @dataclass class READ3Res(rpchelp.Union): # READ3res SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'} status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3) resfail: typing.Optional[READ3ResFail] = rpchelp.rpc_field(READ3ResFail, 
default=None)
    resok: typing.Optional[READ3ResOK] = rpchelp.rpc_field(READ3ResOK, default=None)


# NOTE(review): these classes look machine-generated from the NFSv3 XDR spec
# (RFC 1813 naming throughout).  Field declaration order appears to define the
# wire encoding order via rpchelp — do not reorder fields.
# Result unions share one shape: SWITCH_OPTIONS maps the status discriminant to
# the active arm ('resok' on NFS3_OK, 'resfail' for everything else via None).

class StableHow(rpchelp.Enum):  # stable_how
    UNSTABLE = 0
    DATA_SYNC = 1
    FILE_SYNC = 2


@dataclass
class WRITE3Args(rpchelp.Struct):  # WRITE3args
    file_handle: bytes = rpchelp.rpc_field(NFSFh3)
    offset: int = rpchelp.rpc_field(Offset3)
    count: int = rpchelp.rpc_field(Count3)
    stable: typing.Union[StableHow, int] = rpchelp.rpc_field(StableHow)
    data: bytes = rpchelp.rpc_field(rpchelp.Opaque(rpchelp.LengthType.VAR, None))


@dataclass
class WRITE3ResOK(rpchelp.Struct):  # WRITE3resok
    file_wcc: WccData = rpchelp.rpc_field(WccData)
    count: int = rpchelp.rpc_field(Count3)
    committed: typing.Union[StableHow, int] = rpchelp.rpc_field(StableHow)
    verf: bytes = rpchelp.rpc_field(Writeverf3)


@dataclass
class WRITE3ResFail(rpchelp.Struct):  # WRITE3resfail
    file_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class WRITE3Res(rpchelp.Union):  # WRITE3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[WRITE3ResFail] = rpchelp.rpc_field(WRITE3ResFail, default=None)
    resok: typing.Optional[WRITE3ResOK] = rpchelp.rpc_field(WRITE3ResOK, default=None)


class Createmode3(rpchelp.Enum):  # createmode3
    UNCHECKED = 0
    GUARDED = 1
    EXCLUSIVE = 2


@dataclass
class Createhow3(rpchelp.Union):  # createhow3
    # UNCHECKED/GUARDED carry initial attributes; EXCLUSIVE carries a verifier.
    SWITCH_OPTIONS = {UNCHECKED: 'obj_attributes', GUARDED: 'obj_attributes', EXCLUSIVE: 'verf'}
    mode: typing.Union[Createmode3, int] = rpchelp.rpc_field(Createmode3)
    obj_attributes: typing.Optional[SAttr3] = rpchelp.rpc_field(SAttr3, default=None)
    verf: typing.Optional[bytes] = rpchelp.rpc_field(Createverf3, default=None)


@dataclass
class CREATE3Args(rpchelp.Struct):  # CREATE3args
    where: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)
    how: Createhow3 = rpchelp.rpc_field(Createhow3)


@dataclass
class CREATE3ResOK(rpchelp.Struct):  # CREATE3resok
    obj_handle: typing.Optional[bytes] = rpchelp.rpc_field(PostOpFh3)
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class CREATE3ResFail(rpchelp.Struct):  # CREATE3resfail
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class CREATE3Res(rpchelp.Union):  # CREATE3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[CREATE3ResFail] = rpchelp.rpc_field(CREATE3ResFail, default=None)
    resok: typing.Optional[CREATE3ResOK] = rpchelp.rpc_field(CREATE3ResOK, default=None)


@dataclass
class MKDIR3Args(rpchelp.Struct):  # MKDIR3args
    where: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)
    attributes: SAttr3 = rpchelp.rpc_field(SAttr3)


@dataclass
class MKDIR3ResOK(rpchelp.Struct):  # MKDIR3resok
    obj_handle: typing.Optional[bytes] = rpchelp.rpc_field(PostOpFh3)
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class MKDIR3ResFail(rpchelp.Struct):  # MKDIR3resfail
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class MKDIR3Res(rpchelp.Union):  # MKDIR3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[MKDIR3ResFail] = rpchelp.rpc_field(MKDIR3ResFail, default=None)
    resok: typing.Optional[MKDIR3ResOK] = rpchelp.rpc_field(MKDIR3ResOK, default=None)


@dataclass
class SymlinkData3(rpchelp.Struct):  # symlinkdata3
    symlink_attributes: SAttr3 = rpchelp.rpc_field(SAttr3)
    symlink_data: bytes = rpchelp.rpc_field(NFSPath3)


@dataclass
class SYMLINK3Args(rpchelp.Struct):  # SYMLINK3args
    where: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)
    symlink: SymlinkData3 = rpchelp.rpc_field(SymlinkData3)


@dataclass
class SYMLINK3ResOK(rpchelp.Struct):  # SYMLINK3resok
    obj_handle: typing.Optional[bytes] = rpchelp.rpc_field(PostOpFh3)
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class SYMLINK3ResFail(rpchelp.Struct):  # SYMLINK3resfail
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class SYMLINK3Res(rpchelp.Union):  # SYMLINK3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[SYMLINK3ResFail] = rpchelp.rpc_field(SYMLINK3ResFail, default=None)
    resok: typing.Optional[SYMLINK3ResOK] = rpchelp.rpc_field(SYMLINK3ResOK, default=None)


@dataclass
class DeviceData3(rpchelp.Struct):  # devicedata3
    dev_attributes: SAttr3 = rpchelp.rpc_field(SAttr3)
    spec: SpecData3 = rpchelp.rpc_field(SpecData3)


@dataclass
class MknodData3(rpchelp.Union):  # mknoddata3
    # Discriminated on the file type being created; non-device types other
    # than FIFO/socket carry no body (the None: None default arm).
    SWITCH_OPTIONS = {None: None, NF3BLK: 'blk_device', NF3CHR: 'chr_device',
                      NF3FIFO: 'fifo_pipe_attributes', NF3SOCK: 'sock_pipe_attributes'}
    type: typing.Union[Ftype3, int] = rpchelp.rpc_field(Ftype3)
    blk_device: typing.Optional[DeviceData3] = rpchelp.rpc_field(DeviceData3, default=None)
    chr_device: typing.Optional[DeviceData3] = rpchelp.rpc_field(DeviceData3, default=None)
    fifo_pipe_attributes: typing.Optional[SAttr3] = rpchelp.rpc_field(SAttr3, default=None)
    sock_pipe_attributes: typing.Optional[SAttr3] = rpchelp.rpc_field(SAttr3, default=None)


@dataclass
class MKNOD3Args(rpchelp.Struct):  # MKNOD3args
    where: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)
    what: MknodData3 = rpchelp.rpc_field(MknodData3)


@dataclass
class MKNOD3ResOK(rpchelp.Struct):  # MKNOD3resok
    obj_handle: typing.Optional[bytes] = rpchelp.rpc_field(PostOpFh3)
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class MKNOD3ResFail(rpchelp.Struct):  # MKNOD3resfail
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class MKNOD3Res(rpchelp.Union):  # MKNOD3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[MKNOD3ResFail] = rpchelp.rpc_field(MKNOD3ResFail, default=None)
    resok: typing.Optional[MKNOD3ResOK] = rpchelp.rpc_field(MKNOD3ResOK, default=None)


@dataclass
class REMOVE3Args(rpchelp.Struct):  # REMOVE3args
    object: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)


@dataclass
class REMOVE3ResOK(rpchelp.Struct):  # REMOVE3resok
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class REMOVE3ResFail(rpchelp.Struct):  # REMOVE3resfail
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class REMOVE3Res(rpchelp.Union):  # REMOVE3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[REMOVE3ResFail] = rpchelp.rpc_field(REMOVE3ResFail, default=None)
    resok: typing.Optional[REMOVE3ResOK] = rpchelp.rpc_field(REMOVE3ResOK, default=None)


@dataclass
class RMDIR3Args(rpchelp.Struct):  # RMDIR3args
    object: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)


@dataclass
class RMDIR3ResOK(rpchelp.Struct):  # RMDIR3resok
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class RMDIR3ResFail(rpchelp.Struct):  # RMDIR3resfail
    dir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class RMDIR3Res(rpchelp.Union):  # RMDIR3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[RMDIR3ResFail] = rpchelp.rpc_field(RMDIR3ResFail, default=None)
    resok: typing.Optional[RMDIR3ResOK] = rpchelp.rpc_field(RMDIR3ResOK, default=None)


@dataclass
class RENAME3Args(rpchelp.Struct):  # RENAME3args
    # 'from' is a Python keyword, hence the trailing underscore.
    from_: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)
    to: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)


@dataclass
class RENAME3ResOK(rpchelp.Struct):  # RENAME3resok
    fromdir_wcc: WccData = rpchelp.rpc_field(WccData)
    todir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class RENAME3ResFail(rpchelp.Struct):  # RENAME3resfail
    fromdir_wcc: WccData = rpchelp.rpc_field(WccData)
    todir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class RENAME3Res(rpchelp.Union):  # RENAME3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[RENAME3ResFail] = rpchelp.rpc_field(RENAME3ResFail, default=None)
    resok: typing.Optional[RENAME3ResOK] = rpchelp.rpc_field(RENAME3ResOK, default=None)


@dataclass
class LINK3Args(rpchelp.Struct):  # LINK3args
    file_handle: bytes = rpchelp.rpc_field(NFSFh3)
    link: DiropArgs3 = rpchelp.rpc_field(DiropArgs3)


@dataclass
class LINK3ResOK(rpchelp.Struct):  # LINK3resok
    file_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    linkdir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class LINK3ResFail(rpchelp.Struct):  # LINK3resfail
    file_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    linkdir_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class LINK3Res(rpchelp.Union):  # LINK3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[LINK3ResFail] = rpchelp.rpc_field(LINK3ResFail, default=None)
    resok: typing.Optional[LINK3ResOK] = rpchelp.rpc_field(LINK3ResOK, default=None)


@dataclass
class READDIR3Args(rpchelp.Struct):  # READDIR3args
    dir_handle: bytes = rpchelp.rpc_field(NFSFh3)
    cookie: int = rpchelp.rpc_field(Cookie3)
    cookieverf: bytes = rpchelp.rpc_field(Cookieverf3)
    count: int = rpchelp.rpc_field(Count3)


@dataclass
class Entry3(rpchelp.LinkedList):  # entry3
    fileid: int = rpchelp.rpc_field(Fileid3)
    name: bytes = rpchelp.rpc_field(Filename3)
    cookie: int = rpchelp.rpc_field(Cookie3)


@dataclass
class DirList3(rpchelp.Struct):  # dirlist3
    entries: typing.List[Entry3] = rpchelp.rpc_field(rpchelp.OptData(Entry3))
    eof: bool = rpchelp.rpc_field(rpchelp.r_bool)


@dataclass
class READDIR3ResOK(rpchelp.Struct):  # READDIR3resok
    dir_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    cookieverf: bytes = rpchelp.rpc_field(Cookieverf3)
    reply: DirList3 = rpchelp.rpc_field(DirList3)


@dataclass
class READDIR3ResFail(rpchelp.Struct):  # READDIR3resfail
    dir_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)


@dataclass
class READDIR3Res(rpchelp.Union):  # READDIR3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[READDIR3ResFail] = rpchelp.rpc_field(READDIR3ResFail, default=None)
    resok: typing.Optional[READDIR3ResOK] = rpchelp.rpc_field(READDIR3ResOK, default=None)


@dataclass
class READDIRPLUS3Args(rpchelp.Struct):  # READDIRPLUS3args
    dir_handle: bytes = rpchelp.rpc_field(NFSFh3)
    cookie: int = rpchelp.rpc_field(Cookie3)
    cookieverf: bytes = rpchelp.rpc_field(Cookieverf3)
    dircount: int = rpchelp.rpc_field(Count3)
    maxcount: int = rpchelp.rpc_field(Count3)


@dataclass
class Entryplus3(rpchelp.LinkedList):  # entryplus3
    fileid: int = rpchelp.rpc_field(Fileid3)
    name: bytes = rpchelp.rpc_field(Filename3)
    cookie: int = rpchelp.rpc_field(Cookie3)
    name_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    name_handle: typing.Optional[bytes] = rpchelp.rpc_field(PostOpFh3)


@dataclass
class Dirlistplus3(rpchelp.Struct):  # dirlistplus3
    entries: typing.List[Entryplus3] = rpchelp.rpc_field(rpchelp.OptData(Entryplus3))
    eof: bool = rpchelp.rpc_field(rpchelp.r_bool)


@dataclass
class READDIRPLUS3ResOK(rpchelp.Struct):  # READDIRPLUS3resok
    dir_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    cookieverf: bytes = rpchelp.rpc_field(Cookieverf3)
    reply: Dirlistplus3 = rpchelp.rpc_field(Dirlistplus3)


@dataclass
class READDIRPLUS3ResFail(rpchelp.Struct):  # READDIRPLUS3resfail
    dir_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)


@dataclass
class READDIRPLUS3Res(rpchelp.Union):  # READDIRPLUS3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[READDIRPLUS3ResFail] = rpchelp.rpc_field(READDIRPLUS3ResFail, default=None)
    resok: typing.Optional[READDIRPLUS3ResOK] = rpchelp.rpc_field(READDIRPLUS3ResOK, default=None)


@dataclass
class FSSTAT3Args(rpchelp.Struct):  # FSSTAT3args
    fsroot_handle: bytes = rpchelp.rpc_field(NFSFh3)


@dataclass
class FSSTAT3ResOK(rpchelp.Struct):  # FSSTAT3resok
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    tbytes: int = rpchelp.rpc_field(Size3)
    fbytes: int = rpchelp.rpc_field(Size3)
    abytes: int = rpchelp.rpc_field(Size3)
    tfiles: int = rpchelp.rpc_field(Size3)
    ffiles: int = rpchelp.rpc_field(Size3)
    afiles: int = rpchelp.rpc_field(Size3)
    invarsec: int = rpchelp.rpc_field(Uint32)


@dataclass
class FSSTAT3ResFail(rpchelp.Struct):  # FSSTAT3resfail
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)


@dataclass
class FSSTAT3Res(rpchelp.Union):  # FSSTAT3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[FSSTAT3ResFail] = rpchelp.rpc_field(FSSTAT3ResFail, default=None)
    resok: typing.Optional[FSSTAT3ResOK] = rpchelp.rpc_field(FSSTAT3ResOK, default=None)


# FSINFO 'properties' bitmask flags.
FSF3_LINK = 0x0001
FSF3_SYMLINK = 0x0002
FSF3_HOMOGENEOUS = 0x0008
FSF3_CANSETTIME = 0x0010


@dataclass
class FSINFO3Args(rpchelp.Struct):  # FSINFO3args
    fsroot_handle: bytes = rpchelp.rpc_field(NFSFh3)


@dataclass
class FSINFO3ResOK(rpchelp.Struct):  # FSINFO3resok
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    rtmax: int = rpchelp.rpc_field(Uint32)
    rtpref: int = rpchelp.rpc_field(Uint32)
    rtmult: int = rpchelp.rpc_field(Uint32)
    wtmax: int = rpchelp.rpc_field(Uint32)
    wtpref: int = rpchelp.rpc_field(Uint32)
    wtmult: int = rpchelp.rpc_field(Uint32)
    dtpref: int = rpchelp.rpc_field(Uint32)
    maxfilesize: int = rpchelp.rpc_field(Size3)
    time_delta: NFSTime3 = rpchelp.rpc_field(NFSTime3)
    properties: int = rpchelp.rpc_field(Uint32)


@dataclass
class FSINFO3ResFail(rpchelp.Struct):  # FSINFO3resfail
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)


@dataclass
class FSINFO3Res(rpchelp.Union):  # FSINFO3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[FSINFO3ResFail] = rpchelp.rpc_field(FSINFO3ResFail, default=None)
    resok: typing.Optional[FSINFO3ResOK] = rpchelp.rpc_field(FSINFO3ResOK, default=None)


@dataclass
class PATHCONF3Args(rpchelp.Struct):  # PATHCONF3args
    obj_handle: bytes = rpchelp.rpc_field(NFSFh3)


@dataclass
class PATHCONF3ResOK(rpchelp.Struct):  # PATHCONF3resok
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)
    linkmax: int = rpchelp.rpc_field(Uint32)
    name_max: int = rpchelp.rpc_field(Uint32)
    no_trunc: bool = rpchelp.rpc_field(rpchelp.r_bool)
    chown_restricted: bool = rpchelp.rpc_field(rpchelp.r_bool)
    case_insensitive: bool = rpchelp.rpc_field(rpchelp.r_bool)
    case_preserving: bool = rpchelp.rpc_field(rpchelp.r_bool)


@dataclass
class PATHCONF3ResFail(rpchelp.Struct):  # PATHCONF3resfail
    obj_attributes: typing.Optional[FAttr3] = rpchelp.rpc_field(PostOpAttr)


@dataclass
class PATHCONF3Res(rpchelp.Union):  # PATHCONF3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[PATHCONF3ResFail] = rpchelp.rpc_field(PATHCONF3ResFail, default=None)
    resok: typing.Optional[PATHCONF3ResOK] = rpchelp.rpc_field(PATHCONF3ResOK, default=None)


@dataclass
class COMMIT3Args(rpchelp.Struct):  # COMMIT3args
    file_handle: bytes = rpchelp.rpc_field(NFSFh3)
    offset: int = rpchelp.rpc_field(Offset3)
    count: int = rpchelp.rpc_field(Count3)


@dataclass
class COMMIT3ResOK(rpchelp.Struct):  # COMMIT3resok
    file_wcc: WccData = rpchelp.rpc_field(WccData)
    verf: bytes = rpchelp.rpc_field(Writeverf3)


@dataclass
class COMMIT3ResFail(rpchelp.Struct):  # COMMIT3resfail
    file_wcc: WccData = rpchelp.rpc_field(WccData)


@dataclass
class COMMIT3Res(rpchelp.Union):  # COMMIT3res
    SWITCH_OPTIONS = {None: 'resfail', NFS3_OK: 'resok'}
    status: typing.Union[NFSStat3, int] = rpchelp.rpc_field(NFSStat3)
    resfail: typing.Optional[COMMIT3ResFail] = rpchelp.rpc_field(COMMIT3ResFail, default=None)
    resok: typing.Optional[COMMIT3ResOK] = rpchelp.rpc_field(COMMIT3ResOK, default=None)


# ---- MOUNT protocol types (the companion program to NFSv3) ----
MNTPATHLEN = 1024
MNTNAMLEN = 255
FHSIZE3 = 64

FHandle3 = rpchelp.Opaque(rpchelp.LengthType.VAR, FHSIZE3)
DirPath = rpchelp.Opaque(rpchelp.LengthType.VAR, MNTPATHLEN)
Name = rpchelp.Opaque(rpchelp.LengthType.VAR, MNTNAMLEN)


class MountStat3(rpchelp.Enum):  # mountstat3
    MNT3_OK = 0
    MNT3ERR_PERM = 1
    MNT3ERR_NOENT = 2
    MNT3ERR_IO = 5
    MNT3ERR_ACCES = 13
    MNT3ERR_NOTDIR = 20
    MNT3ERR_INVAL = 22
    MNT3ERR_NAMETOOLONG = 63
    MNT3ERR_NOTSUPP = 10004
    MNT3ERR_SERVERFAULT = 10006


@dataclass
class Mountres3OK(rpchelp.Struct):  # mountres3_ok
    fhandle: bytes = rpchelp.rpc_field(FHandle3)
    auth_flavors: typing.List[int] = rpchelp.rpc_field(rpchelp.Array(rpchelp.r_int, rpchelp.LengthType.VAR, None))


@dataclass
class MountRes3(rpchelp.Union):  # mountres3
    # Body only present on MNT3_OK; any error status carries no arm.
    SWITCH_OPTIONS = {None: None, MNT3_OK: 'mountinfo'}
    fhs_status: typing.Union[MountStat3, int] = rpchelp.rpc_field(MountStat3)
    mountinfo: typing.Optional[Mountres3OK] = rpchelp.rpc_field(Mountres3OK, default=None)


@dataclass
class MountList(rpchelp.LinkedList):  # mountlist
    hostname: bytes = rpchelp.rpc_field(Name)
    directory: bytes = rpchelp.rpc_field(DirPath)


@dataclass
class GroupList(rpchelp.LinkedList):  # grouplist
    grname: bytes = rpchelp.rpc_field(Name)


@dataclass
class ExportList(rpchelp.LinkedList):  # exportlist
    filesys: bytes = rpchelp.rpc_field(DirPath)
    groups: typing.List[bytes] = rpchelp.rpc_field(GroupList)


from shenaniganfs import client, transport


class NFS_PROGRAM_3_SERVER(transport.Prog):
    # Abstract server skeleton for RPC program 100003 (NFS), version 3.
    # Subclasses implement one coroutine per procedure; the procs table maps
    # procedure numbers to their (name, result type, argument types) triples.
    prog = 100003
    vers = 3
    procs = {
        0: rpchelp.Proc('NULL', rpchelp.r_void, []),
        1: rpchelp.Proc('GETATTR', GETATTR3Res, [GETATTR3Args]),
        2: rpchelp.Proc('SETATTR', SETATTR3Res, [SETATTR3Args]),
        3: rpchelp.Proc('LOOKUP', LOOKUP3Res, [LOOKUP3Args]),
        4: rpchelp.Proc('ACCESS', ACCESS3Res, [ACCESS3Args]),
        5: rpchelp.Proc('READLINK', READLINK3Res, [READLINK3Args]),
        6: rpchelp.Proc('READ', READ3Res, [READ3Args]),
        7: rpchelp.Proc('WRITE', WRITE3Res, [WRITE3Args]),
        8: rpchelp.Proc('CREATE', CREATE3Res, [CREATE3Args]),
        9: rpchelp.Proc('MKDIR', MKDIR3Res, [MKDIR3Args]),
        10: rpchelp.Proc('SYMLINK', SYMLINK3Res, [SYMLINK3Args]),
        11: rpchelp.Proc('MKNOD', MKNOD3Res, [MKNOD3Args]),
        12: rpchelp.Proc('REMOVE', REMOVE3Res, [REMOVE3Args]),
        13: rpchelp.Proc('RMDIR', RMDIR3Res, [RMDIR3Args]),
        14: rpchelp.Proc('RENAME', RENAME3Res, [RENAME3Args]),
        15: rpchelp.Proc('LINK', LINK3Res, [LINK3Args]),
        16: rpchelp.Proc('READDIR', READDIR3Res, [READDIR3Args]),
        17: rpchelp.Proc('READDIRPLUS', READDIRPLUS3Res, [READDIRPLUS3Args]),
        18: rpchelp.Proc('FSSTAT', FSSTAT3Res, [FSSTAT3Args]),
        19: rpchelp.Proc('FSINFO', FSINFO3Res, [FSINFO3Args]),
        20: rpchelp.Proc('PATHCONF', PATHCONF3Res, [PATHCONF3Args]),
        21: rpchelp.Proc('COMMIT', COMMIT3Res, [COMMIT3Args]),
    }

    @abc.abstractmethod
    async def NULL(self, call_ctx: transport.CallContext) \
            -> transport.ProcRet[None]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def GETATTR(self, call_ctx: transport.CallContext, arg_0: GETATTR3Args) \
            -> transport.ProcRet[GETATTR3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def SETATTR(self, call_ctx: transport.CallContext, arg_0: SETATTR3Args) \
            -> transport.ProcRet[SETATTR3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def LOOKUP(self, call_ctx: transport.CallContext, arg_0: LOOKUP3Args) \
            -> transport.ProcRet[LOOKUP3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def ACCESS(self, call_ctx: transport.CallContext, arg_0: ACCESS3Args) \
            -> transport.ProcRet[ACCESS3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def READLINK(self, call_ctx: transport.CallContext, arg_0: READLINK3Args) \
            -> transport.ProcRet[READLINK3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def READ(self, call_ctx: transport.CallContext, arg_0: READ3Args) \
            -> transport.ProcRet[READ3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def WRITE(self, call_ctx: transport.CallContext, arg_0: WRITE3Args) \
            -> transport.ProcRet[WRITE3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def CREATE(self, call_ctx: transport.CallContext, arg_0: CREATE3Args) \
            -> transport.ProcRet[CREATE3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def MKDIR(self, call_ctx: transport.CallContext, arg_0: MKDIR3Args) \
            -> transport.ProcRet[MKDIR3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def SYMLINK(self, call_ctx: transport.CallContext, arg_0: SYMLINK3Args) \
            -> transport.ProcRet[SYMLINK3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def MKNOD(self, call_ctx: transport.CallContext, arg_0: MKNOD3Args) \
            -> transport.ProcRet[MKNOD3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def REMOVE(self, call_ctx: transport.CallContext, arg_0: REMOVE3Args) \
            -> transport.ProcRet[REMOVE3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def RMDIR(self, call_ctx: transport.CallContext, arg_0: RMDIR3Args) \
            -> transport.ProcRet[RMDIR3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def RENAME(self, call_ctx: transport.CallContext, arg_0: RENAME3Args) \
            -> transport.ProcRet[RENAME3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def LINK(self, call_ctx: transport.CallContext, arg_0: LINK3Args) \
            -> transport.ProcRet[LINK3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def READDIR(self, call_ctx: transport.CallContext, arg_0: READDIR3Args) \
            -> transport.ProcRet[READDIR3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def READDIRPLUS(self, call_ctx: transport.CallContext, arg_0: READDIRPLUS3Args) \
            -> transport.ProcRet[READDIRPLUS3Res]:
        raise NotImplementedError()

    @abc.abstractmethod
    async def FSSTAT(self, call_ctx: transport.CallContext, arg_0: FSSTAT3Args) \
            -> transport.ProcRet[FSSTAT3Res]:
        raise NotImplementedError()
@abc.abstractmethod async def FSINFO(self, call_ctx: transport.CallContext, arg_0: FSINFO3Args) \ -> transport.ProcRet[FSINFO3Res]: raise NotImplementedError() @abc.abstractmethod async def PATHCONF(self, call_ctx: transport.CallContext, arg_0: PATHCONF3Args) \ -> transport.ProcRet[PATHCONF3Res]: raise NotImplementedError() @abc.abstractmethod async def COMMIT(self, call_ctx: transport.CallContext, arg_0: COMMIT3Args) \ -> transport.ProcRet[COMMIT3Res]: raise NotImplementedError() class NFS_PROGRAM_3_CLIENT(client.BaseClient): prog = 100003 vers = 3 procs = { 0: rpchelp.Proc('NULL', rpchelp.r_void, []), 1: rpchelp.Proc('GETATTR', GETATTR3Res, [GETATTR3Args]), 2: rpchelp.Proc('SETATTR', SETATTR3Res, [SETATTR3Args]), 3: rpchelp.Proc('LOOKUP', LOOKUP3Res, [LOOKUP3Args]), 4: rpchelp.Proc('ACCESS', ACCESS3Res, [ACCESS3Args]), 5: rpchelp.Proc('READLINK', READLINK3Res, [READLINK3Args]), 6: rpchelp.Proc('READ', READ3Res, [READ3Args]), 7: rpchelp.Proc('WRITE', WRITE3Res, [WRITE3Args]), 8: rpchelp.Proc('CREATE', CREATE3Res, [CREATE3Args]), 9: rpchelp.Proc('MKDIR', MKDIR3Res, [MKDIR3Args]), 10: rpchelp.Proc('SYMLINK', SYMLINK3Res, [SYMLINK3Args]), 11: rpchelp.Proc('MKNOD', MKNOD3Res, [MKNOD3Args]), 12: rpchelp.Proc('REMOVE', REMOVE3Res, [REMOVE3Args]), 13: rpchelp.Proc('RMDIR', RMDIR3Res, [RMDIR3Args]), 14: rpchelp.Proc('RENAME', RENAME3Res, [RENAME3Args]), 15: rpchelp.Proc('LINK', LINK3Res, [LINK3Args]), 16: rpchelp.Proc('READDIR', READDIR3Res, [READDIR3Args]), 17: rpchelp.Proc('READDIRPLUS', READDIRPLUS3Res, [READDIRPLUS3Args]), 18: rpchelp.Proc('FSSTAT', FSSTAT3Res, [FSSTAT3Args]), 19: rpchelp.Proc('FSINFO', FSINFO3Res, [FSINFO3Args]), 20: rpchelp.Proc('PATHCONF', PATHCONF3Res, [PATHCONF3Args]), 21: rpchelp.Proc('COMMIT', COMMIT3Res, [COMMIT3Args]), } async def NULL(self) -> client.UnpackedRPCMsg[None]: return await self.send_call(0, ) async def GETATTR(self, arg_0: GETATTR3Args) -> client.UnpackedRPCMsg[GETATTR3Res]: return await self.send_call(1, arg_0) async def 
SETATTR(self, arg_0: SETATTR3Args) -> client.UnpackedRPCMsg[SETATTR3Res]: return await self.send_call(2, arg_0) async def LOOKUP(self, arg_0: LOOKUP3Args) -> client.UnpackedRPCMsg[LOOKUP3Res]: return await self.send_call(3, arg_0) async def ACCESS(self, arg_0: ACCESS3Args) -> client.UnpackedRPCMsg[ACCESS3Res]: return await self.send_call(4, arg_0) async def READLINK(self, arg_0: READLINK3Args) -> client.UnpackedRPCMsg[READLINK3Res]: return await self.send_call(5, arg_0) async def READ(self, arg_0: READ3Args) -> client.UnpackedRPCMsg[READ3Res]: return await self.send_call(6, arg_0) async def WRITE(self, arg_0: WRITE3Args) -> client.UnpackedRPCMsg[WRITE3Res]: return await self.send_call(7, arg_0) async def CREATE(self, arg_0: CREATE3Args) -> client.UnpackedRPCMsg[CREATE3Res]: return await self.send_call(8, arg_0) async def MKDIR(self, arg_0: MKDIR3Args) -> client.UnpackedRPCMsg[MKDIR3Res]: return await self.send_call(9, arg_0) async def SYMLINK(self, arg_0: SYMLINK3Args) -> client.UnpackedRPCMsg[SYMLINK3Res]: return await self.send_call(10, arg_0) async def MKNOD(self, arg_0: MKNOD3Args) -> client.UnpackedRPCMsg[MKNOD3Res]: return await self.send_call(11, arg_0) async def REMOVE(self, arg_0: REMOVE3Args) -> client.UnpackedRPCMsg[REMOVE3Res]: return await self.send_call(12, arg_0) async def RMDIR(self, arg_0: RMDIR3Args) -> client.UnpackedRPCMsg[RMDIR3Res]: return await self.send_call(13, arg_0) async def RENAME(self, arg_0: RENAME3Args) -> client.UnpackedRPCMsg[RENAME3Res]: return await self.send_call(14, arg_0) async def LINK(self, arg_0: LINK3Args) -> client.UnpackedRPCMsg[LINK3Res]: return await self.send_call(15, arg_0) async def READDIR(self, arg_0: READDIR3Args) -> client.UnpackedRPCMsg[READDIR3Res]: return await self.send_call(16, arg_0) async def READDIRPLUS(self, arg_0: READDIRPLUS3Args) -> client.UnpackedRPCMsg[READDIRPLUS3Res]: return await self.send_call(17, arg_0) async def FSSTAT(self, arg_0: FSSTAT3Args) -> client.UnpackedRPCMsg[FSSTAT3Res]: return 
await self.send_call(18, arg_0) async def FSINFO(self, arg_0: FSINFO3Args) -> client.UnpackedRPCMsg[FSINFO3Res]: return await self.send_call(19, arg_0) async def PATHCONF(self, arg_0: PATHCONF3Args) -> client.UnpackedRPCMsg[PATHCONF3Res]: return await self.send_call(20, arg_0) async def COMMIT(self, arg_0: COMMIT3Args) -> client.UnpackedRPCMsg[COMMIT3Res]: return await self.send_call(21, arg_0) class MOUNT_PROGRAM_3_SERVER(transport.Prog): prog = 100005 vers = 3 procs = { 0: rpchelp.Proc('NULL', rpchelp.r_void, []), 1: rpchelp.Proc('MNT', MountRes3, [DirPath]), 2: rpchelp.Proc('DUMP', MountList, []), 3: rpchelp.Proc('UMNT', rpchelp.r_void, [DirPath]), 4: rpchelp.Proc('UMNTALL', rpchelp.r_void, []), 5: rpchelp.Proc('EXPORT', ExportList, []), } @abc.abstractmethod async def NULL(self, call_ctx: transport.CallContext) \ -> transport.ProcRet[None]: raise NotImplementedError() @abc.abstractmethod async def MNT(self, call_ctx: transport.CallContext, arg_0: bytes) \ -> transport.ProcRet[MountRes3]: raise NotImplementedError() @abc.abstractmethod async def DUMP(self, call_ctx: transport.CallContext) \ -> transport.ProcRet[typing.List[MountList]]: raise NotImplementedError() @abc.abstractmethod async def UMNT(self, call_ctx: transport.CallContext, arg_0: bytes) \ -> transport.ProcRet[None]: raise NotImplementedError() @abc.abstractmethod async def UMNTALL(self, call_ctx: transport.CallContext) \ -> transport.ProcRet[None]: raise NotImplementedError() @abc.abstractmethod async def EXPORT(self, call_ctx: transport.CallContext) \ -> transport.ProcRet[typing.List[ExportList]]: raise NotImplementedError() class MOUNT_PROGRAM_3_CLIENT(client.BaseClient): prog = 100005 vers = 3 procs = { 0: rpchelp.Proc('NULL', rpchelp.r_void, []), 1: rpchelp.Proc('MNT', MountRes3, [DirPath]), 2: rpchelp.Proc('DUMP', MountList, []), 3: rpchelp.Proc('UMNT', rpchelp.r_void, [DirPath]), 4: rpchelp.Proc('UMNTALL', rpchelp.r_void, []), 5: rpchelp.Proc('EXPORT', ExportList, []), } async def NULL(self) 
-> client.UnpackedRPCMsg[None]: return await self.send_call(0, ) async def MNT(self, arg_0: bytes) -> client.UnpackedRPCMsg[MountRes3]: return await self.send_call(1, arg_0) async def DUMP(self) -> client.UnpackedRPCMsg[typing.List[MountList]]: return await self.send_call(2, ) async def UMNT(self, arg_0: bytes) -> client.UnpackedRPCMsg[None]: return await self.send_call(3, arg_0) async def UMNTALL(self) -> client.UnpackedRPCMsg[None]: return await self.send_call(4, ) async def EXPORT(self) -> client.UnpackedRPCMsg[typing.List[ExportList]]: return await self.send_call(5, ) __all__ = ['NFS_PROGRAM_3_SERVER', 'NFS_PROGRAM_3_CLIENT', 'MOUNT_PROGRAM_3_SERVER', 'MOUNT_PROGRAM_3_CLIENT', 'TRUE', 'FALSE', 'NFS3_FHSIZE', 'NFS3_COOKIEVERFSIZE', 'NFS3_CREATEVERFSIZE', 'NFS3_WRITEVERFSIZE', 'NFS3_OK', 'NFS3ERR_PERM', 'NFS3ERR_NOENT', 'NFS3ERR_IO', 'NFS3ERR_NXIO', 'NFS3ERR_ACCES', 'NFS3ERR_EXIST', 'NFS3ERR_XDEV', 'NFS3ERR_NODEV', 'NFS3ERR_NOTDIR', 'NFS3ERR_ISDIR', 'NFS3ERR_INVAL', 'NFS3ERR_FBIG', 'NFS3ERR_NOSPC', 'NFS3ERR_ROFS', 'NFS3ERR_MLINK', 'NFS3ERR_NAMETOOLONG', 'NFS3ERR_NOTEMPTY', 'NFS3ERR_DQUOT', 'NFS3ERR_STALE', 'NFS3ERR_REMOTE', 'NFS3ERR_BADHANDLE', 'NFS3ERR_NOT_SYNC', 'NFS3ERR_BAD_COOKIE', 'NFS3ERR_NOTSUPP', 'NFS3ERR_TOOSMALL', 'NFS3ERR_SERVERFAULT', 'NFS3ERR_BADTYPE', 'NFS3ERR_JUKEBOX', 'NF3REG', 'NF3DIR', 'NF3BLK', 'NF3CHR', 'NF3LNK', 'NF3SOCK', 'NF3FIFO', 'DONT_CHANGE', 'SET_TO_SERVER_TIME', 'SET_TO_CLIENT_TIME', 'ACCESS3_READ', 'ACCESS3_LOOKUP', 'ACCESS3_MODIFY', 'ACCESS3_EXTEND', 'ACCESS3_DELETE', 'ACCESS3_EXECUTE', 'UNSTABLE', 'DATA_SYNC', 'FILE_SYNC', 'UNCHECKED', 'GUARDED', 'EXCLUSIVE', 'FSF3_LINK', 'FSF3_SYMLINK', 'FSF3_HOMOGENEOUS', 'FSF3_CANSETTIME', 'MNTPATHLEN', 'MNTNAMLEN', 'FHSIZE3', 'MNT3_OK', 'MNT3ERR_PERM', 'MNT3ERR_NOENT', 'MNT3ERR_IO', 'MNT3ERR_ACCES', 'MNT3ERR_NOTDIR', 'MNT3ERR_INVAL', 'MNT3ERR_NAMETOOLONG', 'MNT3ERR_NOTSUPP', 'MNT3ERR_SERVERFAULT', 'Uint64', 'Int64', 'Uint32', 'Int32', 'Filename3', 'NFSPath3', 'Fileid3', 'Cookie3', 
'Cookieverf3', 'Createverf3', 'Writeverf3', 'Uid3', 'Gid3', 'Size3', 'Offset3', 'Mode3', 'Count3', 'NFSStat3', 'Ftype3', 'SpecData3', 'NFSFh3', 'NFSTime3', 'FAttr3', 'WccAttr', 'PostOpAttr', 'PreOpAttr', 'WccData', 'PostOpFh3', 'TimeHow', 'SetMode3', 'SetUid3', 'SetGid3', 'SetSize3', 'SetTime', 'SAttr3', 'DiropArgs3', 'GETATTR3Args', 'GETATTR3ResOK', 'GETATTR3Res', 'Sattrguard3', 'SETATTR3Args', 'SETATTR3ResOK', 'SETATTR3ResFail', 'SETATTR3Res', 'LOOKUP3Args', 'LOOKUP3ResOK', 'LOOKUP3ResFail', 'LOOKUP3Res', 'ACCESS3Args', 'ACCESS3ResOK', 'ACCESS3ResFail', 'ACCESS3Res', 'READLINK3Args', 'READLINK3ResOK', 'READLINK3ResFail', 'READLINK3Res', 'READ3Args', 'READ3ResOK', 'READ3ResFail', 'READ3Res', 'StableHow', 'WRITE3Args', 'WRITE3ResOK', 'WRITE3ResFail', 'WRITE3Res', 'Createmode3', 'Createhow3', 'CREATE3Args', 'CREATE3ResOK', 'CREATE3ResFail', 'CREATE3Res', 'MKDIR3Args', 'MKDIR3ResOK', 'MKDIR3ResFail', 'MKDIR3Res', 'SymlinkData3', 'SYMLINK3Args', 'SYMLINK3ResOK', 'SYMLINK3ResFail', 'SYMLINK3Res', 'DeviceData3', 'MknodData3', 'MKNOD3Args', 'MKNOD3ResOK', 'MKNOD3ResFail', 'MKNOD3Res', 'REMOVE3Args', 'REMOVE3ResOK', 'REMOVE3ResFail', 'REMOVE3Res', 'RMDIR3Args', 'RMDIR3ResOK', 'RMDIR3ResFail', 'RMDIR3Res', 'RENAME3Args', 'RENAME3ResOK', 'RENAME3ResFail', 'RENAME3Res', 'LINK3Args', 'LINK3ResOK', 'LINK3ResFail', 'LINK3Res', 'READDIR3Args', 'Entry3', 'DirList3', 'READDIR3ResOK', 'READDIR3ResFail', 'READDIR3Res', 'READDIRPLUS3Args', 'Entryplus3', 'Dirlistplus3', 'READDIRPLUS3ResOK', 'READDIRPLUS3ResFail', 'READDIRPLUS3Res', 'FSSTAT3Args', 'FSSTAT3ResOK', 'FSSTAT3ResFail', 'FSSTAT3Res', 'FSINFO3Args', 'FSINFO3ResOK', 'FSINFO3ResFail', 'FSINFO3Res', 'PATHCONF3Args', 'PATHCONF3ResOK', 'PATHCONF3ResFail', 'PATHCONF3Res', 'COMMIT3Args', 'COMMIT3ResOK', 'COMMIT3ResFail', 'COMMIT3Res', 'FHandle3', 'DirPath', 'Name', 'MountStat3', 'Mountres3OK', 'MountRes3', 'MountList', 'GroupList', 'ExportList']
36.933769
3,317
0.731127
5,135
45,170
6.279065
0.087244
0.082188
0.123283
0.046336
0.565456
0.501287
0.430264
0.387185
0.360667
0.313867
0
0.042422
0.153531
45,170
1,222
3,318
36.963993
0.800863
0.030684
0
0.554098
1
0
0.071626
0.001008
0
0
0.001374
0
0
1
0
false
0
0.006557
0
0.554098
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
7f5c60a10d6639427097e82981665c3ad59a9a99
27,436
py
Python
pksampler/ControlPanelForm.py
patrickkidd/pksampler-0.3
ffe5f1fde1d86052da34d9ee9c44934461c441e2
[ "MIT" ]
null
null
null
pksampler/ControlPanelForm.py
patrickkidd/pksampler-0.3
ffe5f1fde1d86052da34d9ee9c44934461c441e2
[ "MIT" ]
null
null
null
pksampler/ControlPanelForm.py
patrickkidd/pksampler-0.3
ffe5f1fde1d86052da34d9ee9c44934461c441e2
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file '/home/patrick/pksampler-0.3/pksampler/ControlPanelForm.ui' # # Created: Wed Jun 29 02:38:59 2005 # by: The PyQt User Interface Compiler (pyuic) 3.14.1 # # WARNING! All changes made in this file will be lost! import sys from qt import * image0_data = \ "\x89\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d" \ "\x49\x48\x44\x52\x00\x00\x00\x20\x00\x00\x00\x20" \ "\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\x00\x00\x09" \ "\xd6\x49\x44\x41\x54\x58\x85\x75\x97\x5b\x6f\x1b" \ "\xd7\xb9\x86\x9f\xb5\xd6\x90\x33\x9c\x19\x0e\x49" \ "\x91\x12\x25\x8a\x52\x6c\x49\x6e\x63\x45\x89\x0d" \ "\xcb\x70\x63\x37\x4a\xe3\x16\x28\xda\x8d\x8d\x5d" \ "\x20\xd8\x17\xfd\x09\x01\xfa\x27\xf2\x5b\x72\x59" \ "\xa0\x17\xbb\xd9\x45\x73\x51\x04\x6d\x53\xc7\x68" \ "\x82\x04\xa9\x63\xc1\xb2\x64\xd9\x96\x6d\x3a\x3a" \ "\x99\x1a\x9e\x86\xc3\x39\xad\x5e\xc8\xa6\xa5\xb8" \ "\x79\x81\xc1\x80\x33\x58\xf3\x3d\xeb\x3b\x2e\x8a" \ "\x5f\xfd\xef\xbb\x7a\x76\x69\x1e\xc3\x32\x59\x9e" \ "\x7a\x8b\xcd\xf5\xfb\x6c\x1f\xee\x50\x3f\x53\x25" \ "\x67\x09\x72\x07\x82\xf6\xe6\x03\x16\x4a\x25\x84" \ "\xd6\xec\x04\x01\x59\x14\x31\xe5\x79\x84\x49\x42" \ "\x36\x3d\xcd\xb3\x76\x9b\x62\xb1\x48\xa3\xd1\xc4" \ "\xf7\x25\xd5\x6a\x13\xdb\xce\x53\x2c\x66\x98\x66" \ "\xca\x0f\x69\x65\x65\x45\x88\xf9\xa5\x79\xed\x95" \ "\x3c\x3c\xcb\x63\xed\x9d\x35\x3c\xcf\xe3\xab\x6f" \ "\xbe\xa2\xed\xb7\x11\x42\xd0\x9c\x69\x62\x48\x83" \ "\xbd\x56\x0b\x23\x97\xc3\x71\x5d\x4c\xcb\x22\x49" \ "\x12\xf2\xf9\x3c\xb9\x7c\x1e\xd3\x34\x99\x9a\x9a" \ "\xc2\xf7\x3b\x7c\xf9\xc5\x37\x1c\x1e\xf6\x28\x57" \ "\x2a\xe4\xf3\x0a\xab\x90\xe1\xfb\x3e\x96\x65\xb1" \ "\xf6\xce\x3b\x38\x8e\x33\x06\x98\x9d\x9d\x45\x5c" \ "\xbf\x7e\x5d\xd7\x6a\x35\x06\x47\x47\xb4\xf7\xf6" \ "\x78\xf3\xfc\x79\x12\xa5\xe8\x8c\x46\x08\x21\x50" \ "\xca\xc4\x34\xab\x08\x21\xc9\x32\x49\x2e\xd7\x47" \ "\x88\x94\xde\xbd\x7b\xbc\x5e\xa9\x20\x72\x39\xfa" \ "\xa3\x11\x1b\x7b\x7b\xd4\x85\x60\xb5\xd1\x20\xe7" \ 
"\x79\xe4\x6a\x35\x36\x46\x23\xfe\xfc\xb7\xbf\x81" \ "\x10\x44\xa3\x11\xef\xfe\xec\x67\x84\xc3\x21\x7e" \ "\xa7\x83\x10\x82\xab\x57\xaf\xa2\x6a\x4a\x7d\xb8" \ "\x6a\x9a\x5c\x36\x4d\xae\xcd\xcd\x31\x15\xc7\xd4" \ "\xa3\x88\xf2\xf2\x32\x8d\xf9\x79\xee\xdf\x7f\xcc" \ "\xe6\x66\x80\x6d\x9f\x27\x49\x1c\xe2\xd8\x63\x6e" \ "\xae\xc8\xcd\x9b\x9f\xf1\xe8\xf1\x63\x6a\x96\x45" \ "\x39\x9f\xa7\x59\x2c\xb2\xbd\xb7\xc7\xc6\xee\x2e" \ "\xeb\xed\x36\x87\xad\x16\xd5\x38\xe6\x6c\xb1\x48" \ "\x43\x4a\x94\x69\xe2\x4e\x4e\xf2\xf9\xcd\x9b\x3c" \ "\x7e\xfc\x98\x5e\xaf\x47\x96\x65\xa8\xf7\x9a\xcd" \ "\x0f\xb3\x38\xe6\xf3\xce\x1e\x77\xdc\x90\x8d\xfc" \ "\x90\xbb\xce\x10\x5d\x33\x28\x15\x2a\x14\x0b\x36" \ "\xb7\x6f\xff\x93\x7b\x8f\xfe\x41\xee\x47\x7b\x14" \ "\xa6\x7c\x86\xb9\x1e\x33\x17\xe7\xb1\x8a\x13\x3c" \ "\xdc\xf7\xb9\xb3\xb9\xc9\x94\x6d\x73\x6d\x79\x99" \ "\x4b\x93\x93\x9c\xa9\x54\xb8\xdb\x6e\xf3\xcf\xbb" \ "\x77\xa9\x5b\x16\x6f\x9e\x3b\xc7\xac\xeb\x12\x75" \ "\x3a\xec\xf8\x3e\xb9\x5c\x8e\x9c\x61\xd0\x6c\x36" \ "\x11\xe7\x7f\x3c\xa7\xcb\x6f\x4d\x63\x37\x3c\x0c" \ "\xcb\xc0\xb6\x6d\x0c\x65\x90\x64\x09\x79\x9d\xe7" \ "\x82\x75\x01\x91\x09\xfe\xf2\xd7\xbf\x70\x6b\xfb" \ "\x16\xcb\x3f\x5f\xa6\x36\x59\x23\x8e\x63\x74\xa4" \ "\x99\x97\xf3\x6c\x7e\x75\x9b\xad\xdb\xb7\x39\x3b" \ "\x35\x85\xed\x38\xf8\xfd\x3e\x83\x20\x60\xe1\xc2" \ "\x05\xb6\xef\xdc\x41\x67\x19\x8e\xeb\x62\xbb\x2e" \ "\x6f\xac\xae\x12\xc5\x31\x59\x96\xb1\xbc\xbc\x8c" \ "\x91\xfb\x71\x85\xc2\x4c\x91\xc0\x0f\x38\xd8\x3e" \ "\x60\xd4\x1b\x91\x44\x09\x49\x94\xe0\x58\x0e\xcd" \ "\xff\x6e\x32\x59\x99\x64\xed\xea\x1a\x41\x10\x70" \ "\xeb\x4f\xb7\x30\x94\x81\x65\x5a\xd8\xb6\xcd\xc4" \ "\xea\x04\xbf\xf8\xf5\xaf\x79\x6b\x75\x95\x83\xa7" \ "\x4f\xb1\x6c\x9b\x73\x8e\x83\xe3\xba\x14\x4b\x25" \ "\xae\xfc\xf4\xa7\xc4\x71\xcc\x70\x38\x64\x30\x18" \ "\x8c\xaf\x28\x8a\xf0\x7d\x1f\x71\xe5\x57\x57\xf4" \ "\xda\xdb\x6b\xfc\xe1\xf7\x7f\x60\xd0\x1f\x60\x99" \ "\x16\x8e\xe3\x90\x37\xf2\x2c\x34\x17\xb8\x74\xe9" \ 
"\x12\xb6\x6d\x93\x24\x09\x5a\x6b\x46\xa3\x11\x86" \ "\x61\xa0\x94\x42\x08\x71\xea\xc3\xbd\x5e\x6f\xfc" \ "\x71\x21\x04\x00\x42\x08\x0c\xc3\x20\x49\x12\x0c" \ "\xc3\xc0\xf3\x3c\x1c\xc7\xa1\x56\xab\x1d\x87\x60" \ "\xe5\xcd\x15\xfd\x93\x2b\x3f\x61\x6e\x6e\x0e\xc7" \ "\x71\x48\x92\x84\xe1\x70\xc8\xfd\xfb\xf7\x69\xb5" \ "\x5a\x6c\x6d\x6d\xb1\xb8\xb8\x48\xbd\x5e\x67\x38" \ "\x1c\x12\xc7\xf1\xb8\x8c\x84\x10\xe4\x72\x39\x94" \ "\x52\x48\x29\x71\x5d\x17\xdb\xb6\x31\x4d\x13\x21" \ "\x04\x5a\x6b\x84\x10\x48\x29\x29\x16\x8b\x58\x96" \ "\x35\x5e\x9b\x65\x19\xf5\x7a\x1d\xd1\x6c\x36\x75" \ "\x9a\xa6\xd4\xeb\x75\x2e\x5f\xbe\xcc\xf4\xf4\x34" \ "\x07\x07\x07\xec\xec\xec\xb0\xbe\xbe\x4e\x96\x65" \ "\x5c\xba\x74\x89\xc5\xc5\x45\xb4\xd6\x63\xc3\x2f" \ "\xee\x42\x08\x0a\x85\x02\x8e\xe3\x20\xa5\x3c\x65" \ "\x20\x0c\x43\xba\xdd\x2e\xbd\x5e\x8f\x6e\xb7\xcb" \ "\x60\x30\x60\x34\x1a\xa1\xb5\xc6\x30\x0c\x2e\x5c" \ "\xb8\x80\xa8\x56\xab\xfa\xe2\xc5\x8b\xbc\xff\xfe" \ "\xfb\x7c\xfc\xf1\xc7\xb4\x5a\x2d\x4a\xa5\x12\x52" \ "\x4a\xb4\xd6\x4c\x4c\x4c\x70\x78\x78\xc8\xea\xea" \ "\x2a\x67\xce\x9c\x61\xf4\xbc\x3f\x68\xad\x19\x0e" \ "\x87\xf4\x7a\xbd\xb1\x81\x6e\xb7\xcb\x70\x38\x44" \ "\x6b\x8d\x52\x0a\xcb\xb2\xf0\x3c\x0f\xd7\x75\xc7" \ "\xd0\x52\xca\x31\xe8\xe2\xe2\x22\x62\x6d\x6d\x4d" \ "\xe7\x72\x39\xae\x5c\xb9\xc2\xc2\xc2\x02\x49\x92" \ "\xd0\x6e\xb7\x09\xc3\x10\x80\xad\xad\x2d\xa2\x28" \ "\xe2\xdb\x6f\xbf\x25\x0c\x43\xe6\xe6\xe6\xc6\x3b" \ "\xb7\x6d\x9b\x62\xb1\x48\xa9\x54\xa2\x50\x28\x90" \ "\x24\x09\x69\x9a\xa2\x94\x42\x6b\x3d\xde\x84\x52" \ "\x0a\xcf\xf3\x28\x16\x8b\x18\x86\x71\xaa\x13\x1a" \ "\x8d\x46\x83\xf5\xf5\x75\x3e\xfa\xe8\x23\x0c\xc3" \ "\xe0\x83\x0f\x3e\xc0\x34\x4d\x3c\xcf\x63\x7b\x7b" \ "\x1b\xdb\xb6\xb9\x7b\xf7\x2e\x83\xc1\x00\x80\xf9" \ "\xf9\x79\x5c\xd7\x3d\x15\x8a\x17\xa1\xa9\xd5\x6a" \ "\x78\x9e\x37\x36\x90\x24\x92\xd1\x48\x11\x86\x8a" \ "\x20\x50\xf8\xbe\xa0\x50\x48\x99\x9c\x0c\x51\x4a" \ "\x33\xe8\xf7\x11\x2b\x2b\x2b\xba\xd3\xe9\x90\x24" \ 
"\xc9\x78\xe1\xc2\xc2\x02\xc3\xe1\x10\xc3\x30\xd8" \ "\xdd\xdd\x25\x49\x12\x94\x52\xd4\x6a\x35\x7e\xf1" \ "\xde\x7b\x4c\x97\xcb\xe4\x47\x23\x54\x92\x30\x28" \ "\x97\xc9\x4e\xc4\x1e\x20\x8e\x63\xfe\xf8\xc7\xaf" \ "\xa9\xd7\xaf\x91\xcf\xbf\x80\x4d\x51\x6a\x84\x61" \ "\x44\x28\x65\x30\x3d\x6d\xd0\x98\x01\x23\x4d\x53" \ "\xd2\x34\x1d\x97\x8b\xeb\xba\x58\x96\x45\xa3\xd1" \ "\xa0\x54\x2a\xf1\xf6\x1b\x6f\x50\x3c\x3a\xa2\x66" \ "\x18\x94\x0d\x83\x7c\xab\x85\x78\xf4\x08\xad\x35" \ "\x1a\x70\xca\x65\xf6\x97\x96\x4e\x41\x3c\x7d\xba" \ "\xcb\xe6\xe6\x97\x3c\x79\x72\x8f\xd7\x5f\xff\x2f" \ "\xaa\xd5\x06\xa6\x19\x90\x65\x26\x49\x72\x5c\x69" \ "\xad\x56\x48\xb9\xfc\x1a\xc6\xd9\xb3\x67\xa9\xd5" \ "\x6a\x14\x0a\x05\xaa\xd5\x2a\xae\xeb\x22\xa5\x44" \ "\x04\x01\xf2\xc1\x03\xb2\xdd\x5d\x0c\x21\xa8\x3a" \ "\x0e\x32\xcb\x18\x15\x0a\x84\xae\x8b\x00\x72\x83" \ "\x01\x96\xef\x33\xb5\xbd\xcd\xde\xd2\x12\x7a\x9c" \ "\x68\xe0\xba\x16\xfd\xfe\x11\xff\xfa\xd7\xef\x59" \ "\x5c\x7c\x97\x66\xf3\x32\x96\x15\x52\x28\x0c\x49" \ "\x53\x93\x38\x2e\x32\x18\x58\xa8\x6b\xd7\xae\x7d" \ "\xb8\xbc\xbc\x4c\xa5\x52\xc1\x34\x4d\xe4\x68\x84" \ "\xda\xda\x42\x6f\x6c\x90\x76\xbb\x64\x5a\x93\x6a" \ "\x8d\x00\x4c\xa5\x30\xd2\x94\xe2\xf4\x34\x59\xa5" \ "\x42\x2f\x08\x18\xe4\xf3\xc4\x51\x44\x65\x30\x20" \ "\xa8\x54\x40\x08\x3c\xcf\x63\x62\xc2\x63\x77\x77" \ "\x8f\x20\x08\x79\xf6\xec\x01\x41\xd0\xa6\x5c\x3e" \ "\x47\x9a\x7a\x24\x49\x1e\x50\xd4\xeb\x1e\x06\xc0" \ "\x68\x34\xe2\xf1\xf6\x36\x8b\x5a\x23\xf7\xf7\x8f" \ "\xbb\xde\xf7\x0e\x0f\xfd\x28\xa2\x90\xcb\x51\x5c" \ "\x5a\xc2\x9d\x9d\x25\xfa\xec\x33\x6a\xcf\x93\x2f" \ "\x55\x0a\xd9\x6e\x33\x25\x04\xfb\x67\xcf\x82\x10" \ "\x2c\x2d\x2d\x61\x59\x16\x9f\x7e\xfa\x19\x4f\x9f" \ "\xb6\xd9\xdb\xbb\x43\x10\x3c\xe3\x37\xbf\xf9\x1d" \ "\xab\xab\x17\xf1\x3c\x97\x4a\xc5\x42\x5d\xbc\x78" \ "\xf1\x43\x21\x04\x1b\x9f\x7e\xca\x82\x94\x64\x59" \ "\xc6\x7f\x92\x06\xd2\x2c\x23\xdf\xef\x13\x3c\x7c" \ "\x08\xcf\x73\x40\x0b\x81\x12\x02\xd3\xb2\x98\x2c" \ 
"\x16\xb1\x1d\x87\x23\x29\x11\x42\x50\x2a\x95\x98" \ "\x99\x99\xa2\xdb\xf5\xf1\xfd\x3e\xa3\xd1\x80\xcd" \ "\xcd\x2f\xe9\xf5\x8e\xf8\xe4\x93\xff\x63\x73\xf3" \ "\xcb\x63\x0f\x54\xab\x55\xf6\xc3\x90\x24\xcb\x30" \ "\xbe\x97\xd1\x27\x15\xc4\x31\xbd\xd1\x88\xa2\x6d" \ "\x63\x3b\x0e\xb5\x7a\x9d\x6a\xa3\x81\xe1\x79\xf4" \ "\xb2\x8c\x56\xbb\xcd\x93\xed\x6d\xd2\x7a\x1d\x55" \ "\x28\x00\x50\xaf\xd7\xf9\xe5\x2f\x7f\xce\x8d\x1b" \ "\x9f\xb3\xbe\xfe\x90\x30\x1c\xf0\xf7\xbf\x7f\x0c" \ "\x40\xa5\xb2\x7a\x0c\x60\x18\x06\xa1\x52\xf4\xa3" \ "\x88\xf2\x89\x7e\xfd\x42\x99\xd6\xa4\x59\x46\x94" \ "\xa6\xf4\x92\x84\xf3\x33\x33\x1c\x66\x19\x5f\xdf" \ "\xbe\xcd\xd6\x27\x9f\xb0\xeb\xfb\x74\xc2\x90\x20" \ "\x8e\x89\xb3\x8c\xff\xf9\xed\x6f\x69\x9c\x39\x33" \ "\x5e\x5f\x2e\x97\xb9\x7e\xfd\x3d\xca\xe5\x6f\xf8" \ "\xfa\xeb\x75\xc2\x30\xc6\xb6\x2d\x66\x66\x6a\x8c" \ "\xdb\xd2\x74\xb3\xc9\xd3\x6e\xf7\x14\x80\x7e\x9e" \ "\x80\x71\x9a\xd2\x8f\x22\xf6\x06\x03\x1e\xfa\x3e" \ "\xff\xbf\xb1\xc1\x28\x49\x88\xb2\x6c\xdc\x84\x4e" \ "\xca\xef\x74\x68\x7c\xef\x99\x6d\xdb\x5c\xbd\xfa" \ "\x36\x2b\x2b\x6f\x10\x45\x11\x8e\xe3\xb0\xb8\xb8" \ "\xf8\x12\x60\x6e\x6e\x8e\x87\x37\x6e\xf0\x7a\xad" \ "\x86\x78\xbe\xeb\x38\xcb\x08\xe2\x98\xc3\x20\xe0" \ "\x81\xef\xd3\xea\x76\x89\xd2\x1f\x3e\xe5\x02\x48" \ "\xc3\xe0\xf1\xfe\x13\x9a\xc6\x59\x02\x19\x60\x67" \ "\x36\x5e\x72\xdc\x1d\xa5\x94\x94\x4a\xa5\x97\x9e" \ "\xcd\xb2\xd3\x00\x37\x3a\x1d\xa2\x34\x45\x00\x61" \ "\x92\xd0\x0e\x43\x1e\xf9\x3e\x3b\x9d\x0e\xc1\x89" \ "\x31\x7c\x52\xca\xcc\xe3\x36\x4a\x14\xe7\xca\xb8" \ "\xcd\x32\xee\xac\x87\xe1\x18\x6c\xe9\x2d\xd0\x20" \ "\xa4\xa0\x91\x6f\x30\x1d\x4d\xbf\xb2\x76\x30\x18" \ "\xbc\x04\x98\x98\x98\x60\x28\x25\xbb\xfd\x3e\x4a" \ "\x08\x5a\xbd\x1e\x0f\x7c\x9f\x6e\x18\x8e\x4b\x52" \ "\x2a\x45\xce\xb2\x28\x78\x45\xec\x39\x8f\xf2\xf2" \ "\x04\xc5\x39\x0f\xe4\xf3\x79\xa0\x41\x20\x20\x03" \ "\x29\x24\x42\x1e\x0f\xad\x7d\xb1\x0f\x79\x5e\x81" \ "\x70\x1c\xe7\x25\x00\xc0\x54\xa3\xc1\x17\xad\x16" \ 
"\x4a\x08\xfc\x38\xc6\xab\x54\xf8\xd1\xb9\x73\x14" \ "\x2b\x15\x1c\xcf\xc3\xb2\x6d\x28\xc0\x81\x79\xc0" \ "\x50\x0c\xd1\x02\x92\x38\x41\x19\x6a\x3c\x21\x5f" \ "\x5c\x27\x01\x84\x10\x3c\x53\xcf\x90\x48\xa6\xa2" \ "\xa9\xb1\x3d\xc3\x30\x4e\x03\x5c\xbf\x7e\x9d\xfd" \ "\x56\x8b\xe9\x99\x19\x6a\xf5\x3a\x79\xd3\x1c\xbf" \ "\x4b\x48\xd8\x89\x76\x78\x12\x3c\x21\x89\x13\x74" \ "\x76\xec\x97\x51\x7f\x84\x53\x76\x8e\x0d\x0a\x81" \ "\x90\x2f\x8d\x9f\xbc\xcb\x4c\x72\xa4\x8e\x10\x79" \ "\xc1\x64\x34\xf9\x12\xe2\x24\x40\xb3\xd9\xa4\xd9" \ "\x6c\x9e\x72\x93\x46\xb3\x97\xec\x71\x7f\x78\x9f" \ "\x70\x14\x92\xbd\xc8\xfc\xe7\x71\x89\x86\x11\x66" \ "\xc1\xc4\xb4\xcd\xe3\x19\x22\x8f\x8f\x60\x52\xc8" \ "\x53\xbf\x2f\x57\x2e\x33\xec\x0f\x59\x8f\xd7\x91" \ "\x3d\x49\x35\xaa\xbe\x0a\x30\x36\xaa\x35\x47\x47" \ "\x47\x74\xb2\x0e\x47\xf6\x11\x7e\xe8\x93\x25\x19" \ "\x68\x5e\x29\x3b\x9d\x69\x82\x6e\x80\x69\x9b\x98" \ "\xd2\x44\xfa\x92\xb4\x98\x22\x0b\x12\xa9\x8e\x21" \ "\x4c\x69\xb2\xd2\x58\xe1\xc6\x77\x37\x28\x0c\x0b" \ "\xf4\x8c\x1e\xe2\x48\x30\xcb\xec\xab\x00\x77\xee" \ "\xdc\xe1\xe6\x17\x37\x99\xb8\x30\x41\xe9\xb5\x12" \ "\xd9\x20\x43\x67\xfa\x3f\xd6\xfb\x0b\xa5\x71\xca" \ "\xb0\x37\xc4\x9d\x72\x69\xcc\x34\x30\x7a\x06\xe1" \ "\x41\x48\x47\x77\xc8\x4a\x19\xf3\xf5\x79\x94\x52" \ "\x1c\x26\x87\xe4\x9f\xff\x97\x0c\x8c\x00\x3f\xef" \ "\xbf\x0a\xb0\xbb\xbb\x8b\xf6\x34\xee\x8c\x4b\x1a" \ "\xa7\xa7\xdc\xfd\x43\xd2\xfa\xd8\x0b\x81\x17\xd0" \ "\xb2\x5b\xa8\x92\xc2\x9d\x74\x99\x51\x33\xa8\x9e" \ "\xa2\xa9\x9a\x3c\x09\x9e\xa0\x72\x8a\x9c\xcc\x21" \ "\xa4\xc0\x32\x2d\xb4\xa3\x31\xbe\xfb\xee\xbb\x53" \ "\xa7\xd9\x28\x8a\x38\x6a\x1d\x51\xb8\x57\x40\xaa" \ "\x1f\x9e\x0b\xaf\x48\x40\x6f\xb7\x47\xb9\x56\xc6" \ "\x30\x0c\xa4\x94\x28\xa5\x50\x4a\x71\xab\x75\xeb" \ "\xd8\x86\xe4\x65\x72\x0a\x81\x7c\x4d\xf2\x6f\xc9" \ "\xcd\x66\xe0\x2f\x28\xf3\x75\x00\x00\x00\x00\x49" \ "\x45\x4e\x44\xae\x42\x60\x82" class ControlPanelForm(QDialog): def __init__(self,parent = None,name = None,modal = 0,fl = 0): 
QDialog.__init__(self,parent,name,modal,fl) self.image0 = QPixmap() self.image0.loadFromData(image0_data,"PNG") if not name: self.setName("ControlPanelForm") self.setIcon(self.image0) self.setSizeGripEnabled(0) self.setModal(1) ControlPanelFormLayout = QVBoxLayout(self,6,5,"ControlPanelFormLayout") self.tabWidget2 = QTabWidget(self,"tabWidget2") self.tab = QWidget(self.tabWidget2,"tab") tabLayout = QVBoxLayout(self.tab,6,6,"tabLayout") self.groupBox2 = QGroupBox(self.tab,"groupBox2") self.groupBox2.setLineWidth(2) self.groupBox2.setColumnLayout(0,Qt.Vertical) self.groupBox2.layout().setSpacing(6) self.groupBox2.layout().setMargin(6) groupBox2Layout = QGridLayout(self.groupBox2.layout()) groupBox2Layout.setAlignment(Qt.AlignTop) spacer2 = QSpacerItem(170,21,QSizePolicy.Expanding,QSizePolicy.Minimum) groupBox2Layout.addItem(spacer2,1,0) self.pathListBox = QListBox(self.groupBox2,"pathListBox") groupBox2Layout.addMultiCellWidget(self.pathListBox,0,0,0,2) self.removePathButton = QPushButton(self.groupBox2,"removePathButton") self.removePathButton.setEnabled(0) groupBox2Layout.addWidget(self.removePathButton,1,1) self.addPathButton = QPushButton(self.groupBox2,"addPathButton") groupBox2Layout.addWidget(self.addPathButton,1,2) tabLayout.addWidget(self.groupBox2) layout7 = QHBoxLayout(None,0,6,"layout7") layout6 = QHBoxLayout(None,0,6,"layout6") self.textLabel1_6 = QLabel(self.tab,"textLabel1_6") layout6.addWidget(self.textLabel1_6) self.animationGranularityComboBox = QComboBox(0,self.tab,"animationGranularityComboBox") layout6.addWidget(self.animationGranularityComboBox) layout7.addLayout(layout6) spacer3 = QSpacerItem(90,21,QSizePolicy.Expanding,QSizePolicy.Minimum) layout7.addItem(spacer3) layout2 = QHBoxLayout(None,0,6,"layout2") self.textLabel1_2 = QLabel(self.tab,"textLabel1_2") layout2.addWidget(self.textLabel1_2) self.pitchComboBox = QComboBox(0,self.tab,"pitchComboBox") layout2.addWidget(self.pitchComboBox) layout7.addLayout(layout2) tabLayout.addLayout(layout7) 
self.groupBox4 = QGroupBox(self.tab,"groupBox4") self.groupBox4.setColumnLayout(0,Qt.Vertical) self.groupBox4.layout().setSpacing(6) self.groupBox4.layout().setMargin(6) groupBox4Layout = QVBoxLayout(self.groupBox4.layout()) groupBox4Layout.setAlignment(Qt.AlignTop) layout5 = QHBoxLayout(None,0,6,"layout5") self.gradientsCheckBox = QCheckBox(self.groupBox4,"gradientsCheckBox") layout5.addWidget(self.gradientsCheckBox) spacer7 = QSpacerItem(116,21,QSizePolicy.Expanding,QSizePolicy.Minimum) layout5.addItem(spacer7) self.useOutputCheckBox = QCheckBox(self.groupBox4,"useOutputCheckBox") layout5.addWidget(self.useOutputCheckBox) groupBox4Layout.addLayout(layout5) layout4 = QVBoxLayout(None,0,6,"layout4") self.updateIntervalSlider = QSlider(self.groupBox4,"updateIntervalSlider") self.updateIntervalSlider.setMaxValue(500) self.updateIntervalSlider.setValue(100) self.updateIntervalSlider.setOrientation(QSlider.Horizontal) self.updateIntervalSlider.setTickmarks(QSlider.Below) self.updateIntervalSlider.setTickInterval(50) layout4.addWidget(self.updateIntervalSlider) layout3 = QHBoxLayout(None,0,6,"layout3") self.textLabel2_4 = QLabel(self.groupBox4,"textLabel2_4") layout3.addWidget(self.textLabel2_4) spacer4 = QSpacerItem(61,21,QSizePolicy.Expanding,QSizePolicy.Minimum) layout3.addItem(spacer4) self.textLabel1_5 = QLabel(self.groupBox4,"textLabel1_5") layout3.addWidget(self.textLabel1_5) spacer5 = QSpacerItem(61,21,QSizePolicy.Expanding,QSizePolicy.Minimum) layout3.addItem(spacer5) self.textLabel3 = QLabel(self.groupBox4,"textLabel3") layout3.addWidget(self.textLabel3) layout4.addLayout(layout3) groupBox4Layout.addLayout(layout4) tabLayout.addWidget(self.groupBox4) self.tabWidget2.insertTab(self.tab,QString.fromLatin1("")) self.tab_2 = QWidget(self.tabWidget2,"tab_2") tabLayout_2 = QVBoxLayout(self.tab_2,6,6,"tabLayout_2") self.groupBox1 = QGroupBox(self.tab_2,"groupBox1") self.groupBox1.setEnabled(0) self.groupBox1.setLineWidth(2) 
self.groupBox1.setColumnLayout(0,Qt.Vertical) self.groupBox1.layout().setSpacing(6) self.groupBox1.layout().setMargin(6) groupBox1Layout = QGridLayout(self.groupBox1.layout()) groupBox1Layout.setAlignment(Qt.AlignTop) self.textLabel2 = QLabel(self.groupBox1,"textLabel2") self.textLabel2.setSizePolicy(QSizePolicy(QSizePolicy.Fixed,QSizePolicy.Preferred,0,0,self.textLabel2.sizePolicy().hasHeightForWidth())) groupBox1Layout.addWidget(self.textLabel2,0,1) self.bufferSizeListBox = QListBox(self.groupBox1,"bufferSizeListBox") self.bufferSizeListBox.setEnabled(0) self.bufferSizeListBox.setSizePolicy(QSizePolicy(QSizePolicy.Fixed,QSizePolicy.Expanding,0,0,self.bufferSizeListBox.sizePolicy().hasHeightForWidth())) self.bufferSizeListBox.setFrameShape(QListBox.StyledPanel) groupBox1Layout.addWidget(self.bufferSizeListBox,1,1) self.textLabel1 = QLabel(self.groupBox1,"textLabel1") self.textLabel1.setPaletteForegroundColor(QColor(255,0,0)) groupBox1Layout.addWidget(self.textLabel1,0,0) self.driverListBox = QListBox(self.groupBox1,"driverListBox") groupBox1Layout.addWidget(self.driverListBox,1,0) tabLayout_2.addWidget(self.groupBox1) self.groupBox3 = QGroupBox(self.tab_2,"groupBox3") self.groupBox3.setLineWidth(2) self.groupBox3.setColumnLayout(0,Qt.Vertical) self.groupBox3.layout().setSpacing(6) self.groupBox3.layout().setMargin(6) groupBox3Layout = QGridLayout(self.groupBox3.layout()) groupBox3Layout.setAlignment(Qt.AlignTop) self.cueOutputListBox = QListBox(self.groupBox3,"cueOutputListBox") groupBox3Layout.addWidget(self.cueOutputListBox,2,1) self.mainOutputListBox = QListBox(self.groupBox3,"mainOutputListBox") groupBox3Layout.addWidget(self.mainOutputListBox,2,0) self.textLabel2_2 = QLabel(self.groupBox3,"textLabel2_2") groupBox3Layout.addWidget(self.textLabel2_2,1,1) self.textLabel1_3 = QLabel(self.groupBox3,"textLabel1_3") groupBox3Layout.addWidget(self.textLabel1_3,1,0) self.textLabel2_3 = QLabel(self.groupBox3,"textLabel2_3") 
groupBox3Layout.addMultiCellWidget(self.textLabel2_3,0,0,0,1) tabLayout_2.addWidget(self.groupBox3) self.tabWidget2.insertTab(self.tab_2,QString.fromLatin1("")) self.TabPage = QWidget(self.tabWidget2,"TabPage") TabPageLayout = QVBoxLayout(self.TabPage,6,6,"TabPageLayout") self.textLabel1_4 = QLabel(self.TabPage,"textLabel1_4") TabPageLayout.addWidget(self.textLabel1_4) self.effectsListBox = QListBox(self.TabPage,"effectsListBox") self.effectsListBox.setSelectionMode(QListBox.NoSelection) TabPageLayout.addWidget(self.effectsListBox) self.tabWidget2.insertTab(self.TabPage,QString.fromLatin1("")) ControlPanelFormLayout.addWidget(self.tabWidget2) layout4_2 = QHBoxLayout(None,0,6,"layout4_2") Horizontal_Spacing2 = QSpacerItem(198,20,QSizePolicy.Expanding,QSizePolicy.Minimum) layout4_2.addItem(Horizontal_Spacing2) self.buttonOk = QPushButton(self,"buttonOk") self.buttonOk.setAutoDefault(1) self.buttonOk.setDefault(1) layout4_2.addWidget(self.buttonOk) self.buttonApply = QPushButton(self,"buttonApply") self.buttonApply.setEnabled(0) layout4_2.addWidget(self.buttonApply) self.buttonCancel = QPushButton(self,"buttonCancel") self.buttonCancel.setAutoDefault(1) layout4_2.addWidget(self.buttonCancel) ControlPanelFormLayout.addLayout(layout4_2) self.languageChange() self.resize(QSize(441,393).expandedTo(self.minimumSizeHint())) self.clearWState(Qt.WState_Polished) self.connect(self.buttonApply,SIGNAL("clicked()"),self.slotApply) self.connect(self.gradientsCheckBox,SIGNAL("toggled(bool)"),self.setDirty) self.connect(self.pitchComboBox,SIGNAL("activated(const QString&)"),self.setDirty) self.connect(self.pathListBox,SIGNAL("selectionChanged()"),self.slotPathSelectionChanged) self.connect(self.removePathButton,SIGNAL("clicked()"),self.slotRemovePath) self.connect(self.addPathButton,SIGNAL("clicked()"),self.slotAddPath) self.connect(self.buttonOk,SIGNAL("clicked()"),self.accept) self.connect(self.buttonOk,SIGNAL("clicked()"),self.slotApply) 
self.connect(self.buttonCancel,SIGNAL("clicked()"),self.reject) self.connect(self.bufferSizeListBox,SIGNAL("highlighted(const QString&)"),self.setDirty) self.connect(self.driverListBox,SIGNAL("highlighted(const QString&)"),self.setDirty) self.connect(self.mainOutputListBox,SIGNAL("selectionChanged(QListBoxItem*)"),self.slotMainOutputSelected) self.connect(self.cueOutputListBox,SIGNAL("selectionChanged(QListBoxItem*)"),self.slotCueOutputSelected) self.connect(self.updateIntervalSlider,SIGNAL("sliderMoved(int)"),self.setDirty) self.connect(self.useOutputCheckBox,SIGNAL("stateChanged(int)"),self.setDirty) self.connect(self.updateIntervalSlider,SIGNAL("valueChanged(int)"),self.slotUpdateInterval) self.connect(self.animationGranularityComboBox,SIGNAL("activated(const QString&)"),self.setDirty) def languageChange(self): self.setCaption(self.__tr("PKSampler: Control Panel")) self.groupBox2.setTitle(self.__tr("Selector Paths")) self.removePathButton.setText(self.__tr("Remove")) self.addPathButton.setText(self.__tr("Add...")) self.textLabel1_6.setText(self.__tr("Animation granularity\n" "(more = faster)")) self.animationGranularityComboBox.clear() self.animationGranularityComboBox.insertItem(self.__tr("1")) self.animationGranularityComboBox.insertItem(self.__tr("2")) self.animationGranularityComboBox.insertItem(self.__tr("3")) self.animationGranularityComboBox.insertItem(self.__tr("4")) self.animationGranularityComboBox.insertItem(self.__tr("6")) self.animationGranularityComboBox.insertItem(self.__tr("8")) self.textLabel1_2.setText(self.__tr("Pitch Max")) self.pitchComboBox.clear() self.pitchComboBox.insertItem(self.__tr("8%")) self.pitchComboBox.insertItem(self.__tr("12%")) self.pitchComboBox.insertItem(self.__tr("24%")) self.pitchComboBox.insertItem(self.__tr("100%")) self.groupBox4.setTitle(self.__tr("User Interface")) self.gradientsCheckBox.setText(self.__tr("Draw Gradients")) self.useOutputCheckBox.setText(self.__tr("Use Output Window")) 
QToolTip.add(self.updateIntervalSlider,self.__tr("Adjusts the display interval for all user interface components")) self.textLabel2_4.setText(self.__tr("Faster (0ms)")) self.textLabel1_5.setText(self.__tr("GUI Update interval")) self.textLabel3.setText(self.__tr("Slower (500ms)")) self.tabWidget2.changeTab(self.tab,self.__tr("General")) self.groupBox1.setTitle(self.__tr("Driver")) self.textLabel2.setText(self.__tr("Buffer Size (bytes)")) self.bufferSizeListBox.clear() self.bufferSizeListBox.insertItem(self.__tr("256")) self.bufferSizeListBox.insertItem(self.__tr("512")) self.bufferSizeListBox.insertItem(self.__tr("1024")) self.bufferSizeListBox.insertItem(self.__tr("2048")) self.bufferSizeListBox.insertItem(self.__tr("4096")) self.textLabel1.setText(self.__tr("<u>You may need to restart the application for these.</u>")) self.driverListBox.clear() self.driverListBox.insertItem(self.__tr("alsa")) self.driverListBox.insertItem(self.__tr("arts")) self.driverListBox.insertItem(self.__tr("devdsp")) self.driverListBox.insertItem(self.__tr("null")) self.groupBox3.setTitle(self.__tr("Outputs")) self.textLabel2_2.setText(self.__tr("Cue")) self.textLabel1_3.setText(self.__tr("Main")) self.textLabel2_3.setText(self.__tr("Select the audio devices to be used for the main and cue zones.")) self.tabWidget2.changeTab(self.tab_2,self.__tr("Audio Devices")) self.textLabel1_4.setText(self.__tr("Detected LadspaPlugins. 
These plugins cannot be used yet.")) self.tabWidget2.changeTab(self.TabPage,self.__tr("Effects")) self.buttonOk.setText(self.__tr("&OK")) self.buttonOk.setAccel(QString.null) self.buttonApply.setText(self.__tr("A&pply")) self.buttonApply.setAccel(self.__tr("Alt+P")) self.buttonCancel.setText(self.__tr("&Cancel")) self.buttonCancel.setAccel(QString.null) def slotApply(self): print "ControlPanelForm.slotApply(): Not implemented yet" def slotAddPath(self): print "ControlPanelForm.slotAddPath(): Not implemented yet" def slotRemovePath(self): print "ControlPanelForm.slotRemovePath(): Not implemented yet" def slotPathSelectionChanged(self): print "ControlPanelForm.slotPathSelectionChanged(): Not implemented yet" def slotPitchRangeChanged(self,a0): print "ControlPanelForm.slotPitchRangeChanged(const QString&): Not implemented yet" def slotMainOutputSelected(self,a0): print "ControlPanelForm.slotMainOutputSelected(QListBoxItem*): Not implemented yet" def slotCueOutputSelected(self,a0): print "ControlPanelForm.slotCueOutputSelected(QListBoxItem*): Not implemented yet" def slotUpdateInterval(self,a0): print "ControlPanelForm.slotUpdateInterval(int): Not implemented yet" def setDirty(self): print "ControlPanelForm.setDirty(): Not implemented yet" def __tr(self,s,c = None): return qApp.translate("ControlPanelForm",s,c) if __name__ == "__main__": a = QApplication(sys.argv) QObject.connect(a,SIGNAL("lastWindowClosed()"),a,SLOT("quit()")) w = ControlPanelForm() a.setMainWidget(w) w.show() a.exec_loop()
49.256732
158
0.675135
4,139
27,436
4.434646
0.137714
0.015364
0.016562
0.009807
0.106293
0.029638
0.023427
0.013402
0.013402
0
0
0.157879
0.140035
27,436
556
159
49.345324
0.620073
0.01006
0
0
1
0.455319
0.453816
0.39673
0
0
0
0
0
0
null
null
0
0.004255
null
null
0.019149
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
7f69edf35f8e4bcc9bc0a61151304a9e10c71491
3,419
py
Python
person/migrations/0002_driver_name.py
zhangpotato/kx_backmanage
5a484c19616a77a75698f63b8fa220ee027c0c17
[ "Apache-2.0" ]
null
null
null
person/migrations/0002_driver_name.py
zhangpotato/kx_backmanage
5a484c19616a77a75698f63b8fa220ee027c0c17
[ "Apache-2.0" ]
null
null
null
person/migrations/0002_driver_name.py
zhangpotato/kx_backmanage
5a484c19616a77a75698f63b8fa220ee027c0c17
[ "Apache-2.0" ]
null
null
null
# Generated by Django 2.1.5 on 2019-02-03 12:18 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ('person', '0001_initial'), ] operations = [ migrations.CreateModel( name='Driver_name', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('username', models.CharField(blank=True, max_length=30, null=True)), ('password', models.CharField(blank=True, max_length=30, null=True)), ('level', models.IntegerField(blank=True, null=True)), ('image', models.ImageField(blank=True, null=True, upload_to='')), ('name', models.CharField(blank=True, max_length=15, null=True)), ('sex', models.CharField(blank=True, max_length=5, null=True)), ('idcard', models.CharField(blank=True, max_length=20, null=True)), ('age', models.IntegerField(blank=True, null=True)), ('phone', models.CharField(blank=True, max_length=20, null=True)), ('drive_type', models.CharField(blank=True, max_length=20, null=True)), ('idcard_start', models.DateTimeField(blank=True, null=True)), ('idcard_end', models.DateTimeField(blank=True, null=True)), ('police_address', models.CharField(blank=True, max_length=50, null=True)), ('family_name', models.CharField(blank=True, max_length=20, null=True)), ('family_phone', models.CharField(blank=True, max_length=20, null=True)), ('family_relation', models.CharField(blank=True, max_length=10, null=True)), ('family_address', models.CharField(blank=True, max_length=100, null=True)), ('driver_card_number', models.CharField(blank=True, max_length=20, null=True)), ('driver_file_number', models.CharField(blank=True, max_length=50, null=True)), ('work_grade', models.CharField(blank=True, max_length=50, null=True)), ('in_time', models.DateTimeField(blank=True, null=True)), ('address', models.CharField(blank=True, max_length=50, null=True)), ('driver_card_endtime', models.DateTimeField(blank=True, null=True)), ('grade_content', models.CharField(blank=True, 
max_length=50, null=True)), ('drive_card_updatetime', models.DateTimeField(blank=True, null=True)), ('work_number', models.CharField(blank=True, max_length=30, null=True)), ('work_use_date', models.DateTimeField(blank=True, null=True)), ('work_type', models.CharField(blank=True, max_length=50, null=True)), ('next_study_time', models.DateTimeField(blank=True, null=True)), ('grade_log', models.CharField(blank=True, max_length=50, null=True)), ('integrity_content', models.CharField(blank=True, max_length=50, null=True)), ('min_phonenumber', models.CharField(blank=True, max_length=6, null=True)), ('other', models.CharField(blank=True, max_length=100, null=True)), ], options={ 'verbose_name': 'driver_name', 'db_table': 'driver_name', }, ), ]
57.949153
114
0.598713
388
3,419
5.118557
0.237113
0.149547
0.231621
0.277946
0.699899
0.699899
0.57855
0.437563
0.437563
0.176737
0
0.025331
0.249488
3,419
58
115
58.948276
0.748636
0.013162
0
0
1
0
0.126631
0.006228
0
0
0
0
0
1
0
false
0.019608
0.019608
0
0.098039
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
7f6f28798257d254df3d12c07dd2ddef60089cbd
205
py
Python
src/onceml/types/component_msg.py
lzmchina/OnceML
f30d9037d2e492d8d45b858f2be3b27fc5258356
[ "MIT" ]
1
2022-01-01T07:15:03.000Z
2022-01-01T07:15:03.000Z
src/onceml/types/component_msg.py
lzmchina/OnceML
f30d9037d2e492d8d45b858f2be3b27fc5258356
[ "MIT" ]
null
null
null
src/onceml/types/component_msg.py
lzmchina/OnceML
f30d9037d2e492d8d45b858f2be3b27fc5258356
[ "MIT" ]
null
null
null
from enum import Enum # component数据文件里具体的子目录划分 class Component_Data_URL(Enum): STATE = 'state.json' # 组件状态的文件 CHANNELS = 'result.json' # 组件的运行结果,轻量化数据 ARTIFACTS = 'artifact' # 组件产生的各种数据文件
22.777778
45
0.712195
22
205
6.545455
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.195122
205
8
46
25.625
0.872727
0.273171
0
0
0
0
0.201389
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
7f786ba3af87092f7b856f92aae67a384e2a54d8
106
py
Python
rwafake/data/emails/domains.py
knowbee/py-rwafaker
7ca4ba0538cc7536bfcb9a2147bc9a811bc4b1ed
[ "MIT" ]
1
2021-10-10T04:25:25.000Z
2021-10-10T04:25:25.000Z
rwafake/data/emails/domains.py
knowbee/py-rwafaker
7ca4ba0538cc7536bfcb9a2147bc9a811bc4b1ed
[ "MIT" ]
null
null
null
rwafake/data/emails/domains.py
knowbee/py-rwafaker
7ca4ba0538cc7536bfcb9a2147bc9a811bc4b1ed
[ "MIT" ]
null
null
null
email = [ "rwandaonline.rw", "rra.gov.rw", "ur.ac.rw", "gmail.com", "yahoo.com", "yahoo.fr" ]
11.777778
20
0.528302
15
106
3.733333
0.733333
0.285714
0
0
0
0
0
0
0
0
0
0
0.207547
106
8
21
13.25
0.666667
0
0
0
0
0
0.556604
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
7f9abff28b4e1dbb0769bc77184cfc5a507bbbc3
18,931
py
Python
contrib/addons/test/nagios/plugins/test_sys_logger.py
cglewis/ambari
e5d7e08e42baea2ad15784400c6f9e7ebb5f1608
[ "Apache-2.0" ]
2
2018-06-06T14:21:11.000Z
2018-06-06T14:22:50.000Z
contrib/addons/test/nagios/plugins/test_sys_logger.py
cglewis/ambari
e5d7e08e42baea2ad15784400c6f9e7ebb5f1608
[ "Apache-2.0" ]
null
null
null
contrib/addons/test/nagios/plugins/test_sys_logger.py
cglewis/ambari
e5d7e08e42baea2ad15784400c6f9e7ebb5f1608
[ "Apache-2.0" ]
2
2020-11-04T06:30:31.000Z
2020-11-06T11:02:33.000Z
#!/usr/bin/python # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys sys.path.append('../src') import sys_logger tests_passed = 0 tests_failed = 0 def test_log_tvi_msg(msg): global tests_passed, tests_failed if msg == expected_log_msg: print 'Test Passed' tests_passed += 1 else: print '*** TEST FAILED ***' print 'Expected MSG: {0}'.format(expected_log_msg) print 'Actual MSG : {0}'.format(msg) tests_failed += 1 sys_logger.log_tvi_msg = test_log_tvi_msg def test(tvi_rule, expected_msg, arg1, arg2, arg3, arg4, arg5): sys.stdout.write(tvi_rule + ': ') global expected_log_msg expected_log_msg = expected_msg sys_logger.generate_tvi_log_msg(arg1, arg2, arg3, arg4, arg5) def summary(): total_tests = tests_passed + tests_failed print '\nTests Run: {0}'.format(total_tests) print 'Passed: {0}, Failed: {1}'.format(tests_passed, tests_failed) if not tests_failed: print 'SUCCESS! All tests pass.' 
# Hadoop_Host_Down test('Hadoop_Host_Down', 'Critical: Hadoop: host_down# Event Host=MY_HOST(CRITICAL), PING FAILED - Packet loss = 100%, RTA = 0.00 ms', 'HARD', '1', 'CRITICAL', 'Host::Ping', 'Event Host=MY_HOST(CRITICAL), PING FAILED - Packet loss = 100%, RTA = 0.00 ms') test('Hadoop_Host_Down:OK', 'OK: Hadoop: host_down_ok# Event Host=MY_HOST(OK), PING SUCCESS - Packet loss = 0%, RTA = 1.00 ms', 'HARD', '1', 'OK', 'Host::Ping', 'Event Host=MY_HOST(OK), PING SUCCESS - Packet loss = 0%, RTA = 1.00 ms') # Hadoop_Master_Daemon_CPU_Utilization test('Hadoop_Master_Daemon_CPU_Utilization', 'Critical: Hadoop: master_cpu_utilization# Event Host=MY_HOST Service Description=HBASEMASTER::HBaseMaster CPU utilization(CRITICAL), 4 CPU, average load 2.5% 200%', 'HARD', '1', 'CRITICAL', 'HBASEMASTER::HBaseMaster CPU utilization', 'Event Host=MY_HOST Service Description=HBASEMASTER::HBaseMaster CPU utilization(CRITICAL), 4 CPU, average load 2.5% 200%') test('Hadoop_Master_Daemon_CPU_Utilization:Degraded', 'Degraded: Hadoop: master_cpu_utilization# Event Host=MY_HOST Service Description=HBASEMASTER::HBaseMaster CPU utilization(CRITICAL), 4 CPU, average load 2.5% 200%', 'HARD', '1', 'WARNING', 'HBASEMASTER::HBaseMaster CPU utilization', 'Event Host=MY_HOST Service Description=HBASEMASTER::HBaseMaster CPU utilization(CRITICAL), 4 CPU, average load 2.5% 200%') test('Hadoop_Master_Daemon_CPU_Utilization:OK', 'OK: Hadoop: master_cpu_utilization_ok# Event Host=MY_HOST Service Description=HBASEMASTER::HBaseMaster CPU utilization(OK), 4 CPU, average load 2.5% 200%', 'HARD', '1', 'OK', 'HBASEMASTER::HBaseMaster CPU utilization', 'Event Host=MY_HOST Service Description=HBASEMASTER::HBaseMaster CPU utilization(OK), 4 CPU, average load 2.5% 200%') # Hadoop_HDFS_Percent_Capacity test('Hadoop_HDFS_Percent_Capacity', 'Critical: Hadoop: hdfs_percent_capacity# Event Host=MY_HOST Service Description=HDFS::HDFS Capacity utilization(CRITICAL),DFSUsedGB:0.1, DFSTotalGB:1568.7', 'HARD', '1', 
'CRITICAL', 'HDFS::HDFS Capacity utilization', 'Event Host=MY_HOST Service Description=HDFS::HDFS Capacity utilization(CRITICAL),DFSUsedGB:0.1, DFSTotalGB:1568.7') test('Hadoop_HDFS_Percent_Capacity:OK', 'OK: Hadoop: hdfs_percent_capacity_ok# Event Host=MY_HOST Service Description=HDFS::HDFS Capacity utilization(OK),DFSUsedGB:0.1, DFSTotalGB:1568.7', 'HARD', '1', 'OK', 'HDFS::HDFS Capacity utilization', 'Event Host=MY_HOST Service Description=HDFS::HDFS Capacity utilization(OK),DFSUsedGB:0.1, DFSTotalGB:1568.7') # Hadoop_HDFS_Corrupt_Missing_Blocks test('Hadoop_HDFS_Corrupt_Missing_Blocks', 'Critical: Hadoop: hdfs_block# Event Host=MY_HOST Service Description=HDFS::Corrupt/Missing blocks(CRITICAL), corrupt_blocks:0, missing_blocks:0, total_blocks:147', 'HARD', '1', 'CRITICAL', 'HDFS::Corrupt/Missing blocks', 'Event Host=MY_HOST Service Description=HDFS::Corrupt/Missing blocks(CRITICAL), corrupt_blocks:0, missing_blocks:0, total_blocks:147') test('Hadoop_HDFS_Corrupt_Missing_Blocks:OK', 'OK: Hadoop: hdfs_block_ok# Event Host=MY_HOST Service Description=HDFS::Corrupt/Missing blocks(OK), corrupt_blocks:0, missing_blocks:0, total_blocks:147', 'HARD', '1', 'OK', 'HDFS::Corrupt/Missing blocks', 'Event Host=MY_HOST Service Description=HDFS::Corrupt/Missing blocks(OK), corrupt_blocks:0, missing_blocks:0, total_blocks:147') # Hadoop_NameNode_Edit_Log_Dir_Write test('Hadoop_NameNode_Edit_Log_Dir_Write', 'Critical: Hadoop: namenode_edit_log_write# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'NAMENODE::Namenode Edit logs directory status', 'SERVICE MSG') test('Hadoop_NameNode_Edit_Log_Dir_Write:OK', 'OK: Hadoop: namenode_edit_log_write_ok# SERVICE MSG', 'HARD', '1', 'OK', 'NAMENODE::Namenode Edit logs directory status', 'SERVICE MSG') # Hadoop_DataNode_Down test('Hadoop_DataNode_Down', 'Critical: Hadoop: datanode_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'HDFS::Percent DataNodes down','SERVICE MSG') test('Hadoop_DataNode_Down:OK', 'OK: Hadoop: datanode_down_ok# SERVICE MSG', 
'HARD', '1', 'OK', 'HDFS::Percent DataNodes down','SERVICE MSG') # Hadoop_DataNode_Process_Down test('Hadoop_DataNode_Process_Down', 'Critical: Hadoop: datanode_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'DATANODE::Process down', 'SERVICE MSG') test('Hadoop_DataNode_Process_Down:OK', 'OK: Hadoop: datanode_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'DATANODE::Process down', 'SERVICE MSG') # Hadoop_Percent_DataNodes_Storage_Full test('Hadoop_Percent_DataNodes_Storage_Full', 'Critical: Hadoop: datanodes_percent_storage_full# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'HDFS::Percent DataNodes storage full', 'SERVICE MSG') test('Hadoop_Percent_DataNodes_Storage_Full:OK', 'OK: Hadoop: datanodes_percent_storage_full_ok# SERVICE MSG', 'HARD', '1', 'OK', 'HDFS::Percent DataNodes storage full', 'SERVICE MSG') # Hadoop_NameNode_Process_Down test('Hadoop_NameNode_Process_Down:CRITICAL', 'Fatal: Hadoop: namenode_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'NAMENODE::Namenode Process down', 'SERVICE MSG') test('Hadoop_NameNode_Process_Down:WARNING', 'Fatal: Hadoop: namenode_process_down# SERVICE MSG', 'HARD', '1', 'WARNING', 'NAMENODE::Namenode Process down', 'SERVICE MSG') test('Hadoop_NameNode_Process_Down:UNKNOWN', 'Fatal: Hadoop: namenode_process_down# SERVICE MSG', 'HARD', '1', 'UNKNOWN', 'NAMENODE::Namenode Process down', 'SERVICE MSG') test('Hadoop_NameNode_Process_Down:OK', 'OK: Hadoop: namenode_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'NAMENODE::Namenode Process down', 'SERVICE MSG') # Hadoop_Secondary_NameNode_Process_Down test('Hadoop_Secondary_NameNode_Process_Down', 'Critical: Hadoop: secondary_namenode_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'NAMENODE::Secondary Namenode Process down', 'SERVICE MSG') test('Hadoop_Secondary_NameNode_Process_Down:OK', 'OK: Hadoop: secondary_namenode_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'NAMENODE::Secondary Namenode Process down', 'SERVICE MSG') # Hadoop_NameNode_RPC_Latency 
test('Hadoop_NameNode_RPC_Latency', 'Critical: Hadoop: namenode_rpc_latency# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'HDFS::Namenode RPC Latency', 'SERVICE MSG') test('Hadoop_NameNode_RPC_Latency:Degraded', 'Degraded: Hadoop: namenode_rpc_latency# SERVICE MSG', 'HARD', '1', 'WARNING', 'HDFS::Namenode RPC Latency', 'SERVICE MSG') test('Hadoop_NameNode_RPC_Latency:OK', 'OK: Hadoop: namenode_rpc_latency_ok# SERVICE MSG', 'HARD', '1', 'OK', 'HDFS::Namenode RPC Latency', 'SERVICE MSG') # Hadoop_DataNodes_Storage_Full test('Hadoop_DataNodes_Storage_Full', 'Critical: Hadoop: datanodes_storage_full# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'DATANODE::Storage full', 'SERVICE MSG') test('Hadoop_DataNodes_Storage_Full:OK', 'OK: Hadoop: datanodes_storage_full_ok# SERVICE MSG', 'HARD', '1', 'OK', 'DATANODE::Storage full', 'SERVICE MSG') # Hadoop_JobTracker_Process_Down test('Hadoop_JobTracker_Process_Down', 'Critical: Hadoop: jobtracker_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'JOBTRACKER::Jobtracker Process down', 'SERVICE MSG') test('Hadoop_JobTracker_Process_Down:OK', 'OK: Hadoop: jobtracker_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'JOBTRACKER::Jobtracker Process down', 'SERVICE MSG') # Hadoop_JobTracker_RPC_Latency test('Hadoop_JobTracker_RPC_Latency', 'Critical: Hadoop: jobtracker_rpc_latency# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'MAPREDUCE::JobTracker RPC Latency', 'SERVICE MSG') test('Hadoop_JobTracker_RPC_Latency:Degraded', 'Degraded: Hadoop: jobtracker_rpc_latency# SERVICE MSG', 'HARD', '1', 'WARNING', 'MAPREDUCE::JobTracker RPC Latency', 'SERVICE MSG') test('Hadoop_JobTracker_RPC_Latency:OK', 'OK: Hadoop: jobtracker_rpc_latency_ok# SERVICE MSG', 'HARD', '1', 'OK', 'MAPREDUCE::JobTracker RPC Latency', 'SERVICE MSG') # Hadoop_JobTracker_CPU_Utilization test('Hadoop_JobTracker_CPU_Utilization', 'Critical: Hadoop: jobtracker_cpu_utilization# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'JOBTRACKER::Jobtracker CPU utilization', 'SERVICE MSG') 
test('Hadoop_JobTracker_CPU_Utilization:Degraded', 'Degraded: Hadoop: jobtracker_cpu_utilization# SERVICE MSG', 'HARD', '1', 'WARNING', 'JOBTRACKER::Jobtracker CPU utilization', 'SERVICE MSG') test('Hadoop_JobTracker_CPU_Utilization:OK', 'OK: Hadoop: jobtracker_cpu_utilization_ok# SERVICE MSG', 'HARD', '1', 'OK', 'JOBTRACKER::Jobtracker CPU utilization', 'SERVICE MSG') # Hadoop_TaskTracker_Down test('Hadoop_TaskTracker_Down', 'Critical: Hadoop: tasktrackers_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'MAPREDUCE::Percent TaskTrackers down', 'SERVICE MSG') test('Hadoop_TaskTracker_Down:OK', 'OK: Hadoop: tasktrackers_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'MAPREDUCE::Percent TaskTrackers down', 'SERVICE MSG') # Hadoop_TaskTracker_Process_Down test('Hadoop_TaskTracker_Process_Down', 'Critical: Hadoop: tasktracker_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'TASKTRACKER::Process down', 'SERVICE MSG') test('Hadoop_TaskTracker_Process_Down:OK', 'OK: Hadoop: tasktracker_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'TASKTRACKER::Process down', 'SERVICE MSG') # Hadoop_HBaseMaster_Process_Down test('Hadoop_HBaseMaster_Process_Down', 'Critical: Hadoop: hbasemaster_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'HBASEMASTER::HBaseMaster Process down', 'SERVICE MSG') test('Hadoop_HBaseMaster_Process_Down:OK', 'OK: Hadoop: hbasemaster_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'HBASEMASTER::HBaseMaster Process down', 'SERVICE MSG') # Hadoop_RegionServer_Process_Down test('Hadoop_RegionServer_Process_Down', 'Critical: Hadoop: regionserver_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'REGIONSERVER::Process down', 'SERVICE MSG') test('Hadoop_RegionServer_Process_Down:OK', 'OK: Hadoop: regionserver_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'REGIONSERVER::Process down', 'SERVICE MSG') # Hadoop_RegionServer_Down test('Hadoop_RegionServer_Down', 'Critical: Hadoop: regionservers_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'HBASE::Percent 
region servers down', 'SERVICE MSG') test('Hadoop_RegionServer_Down:OK', 'OK: Hadoop: regionservers_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'HBASE::Percent region servers down', 'SERVICE MSG') # Hadoop_Hive_Metastore_Process_Down test('Hadoop_Hive_Metastore_Process_Down', 'Critical: Hadoop: hive_metastore_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'HIVE-METASTORE::HIVE-METASTORE status check', 'SERVICE MSG') test('Hadoop_Hive_Metastore_Process_Down:OK', 'OK: Hadoop: hive_metastore_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'HIVE-METASTORE::HIVE-METASTORE status check', 'SERVICE MSG') # Hadoop_Zookeeper_Down test('Hadoop_Zookeeper_Down', 'Critical: Hadoop: zookeepers_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'ZOOKEEPER::Percent zookeeper servers down', 'SERVICE MSG') test('Hadoop_Zookeeper_Down:OK', 'OK: Hadoop: zookeepers_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'ZOOKEEPER::Percent zookeeper servers down', 'SERVICE MSG') # Hadoop_Zookeeper_Process_Down test('Hadoop_Zookeeper_Process_Down', 'Critical: Hadoop: zookeeper_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'ZKSERVERS::ZKSERVERS Process down', 'SERVICE MSG') test('Hadoop_Zookeeper_Process_Down:OK', 'OK: Hadoop: zookeeper_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'ZKSERVERS::ZKSERVERS Process down', 'SERVICE MSG') # Hadoop_Oozie_Down test('Hadoop_Oozie_Down', 'Critical: Hadoop: oozie_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'OOZIE::Oozie status check', 'SERVICE MSG') test('Hadoop_Oozie_Down:OK', 'OK: Hadoop: oozie_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'OOZIE::Oozie status check', 'SERVICE MSG') # Hadoop_Templeton_Down test('Hadoop_Templeton_Down', 'Critical: Hadoop: templeton_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'TEMPLETON::Templeton status check', 'SERVICE MSG') test('Hadoop_Templeton_Down:OK', 'OK: Hadoop: templeton_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'TEMPLETON::Templeton status check', 'SERVICE MSG') # Hadoop_Puppet_Down test('Hadoop_Puppet_Down', 
'Critical: Hadoop: puppet_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'PUPPET::Puppet agent down', 'SERVICE MSG') test('Hadoop_Puppet_Down:OK', 'OK: Hadoop: puppet_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'PUPPET::Puppet agent down', 'SERVICE MSG') # Hadoop_Nagios_Status_Log_Stale test('Hadoop_Nagios_Status_Log_Stale', 'Critical: Hadoop: nagios_status_log_stale# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'NAGIOS::Nagios status log staleness', 'SERVICE MSG') test('Hadoop_Nagios_Status_Log_Stale:OK', 'OK: Hadoop: nagios_status_log_stale_ok# SERVICE MSG', 'HARD', '1', 'OK', 'NAGIOS::Nagios status log staleness', 'SERVICE MSG') # Hadoop_Ganglia_Process_Down test('Hadoop_Ganglia_Process_Down', 'Critical: Hadoop: ganglia_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'GANGLIA::Ganglia [gmetad] Process down', 'SERVICE MSG') test('Hadoop_Ganglia_Process_Down:OK', 'OK: Hadoop: ganglia_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'GANGLIA::Ganglia [gmetad] Process down', 'SERVICE MSG') # Hadoop_Ganglia_Collector_Process_Down test('Hadoop_Ganglia_Collector_Process_Down', 'Critical: Hadoop: ganglia_collector_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'GANGLIA::Ganglia collector [gmond] Process down alert for hbasemaster', 'SERVICE MSG') test('Hadoop_Ganglia_Collector_Process_Down:OK', 'OK: Hadoop: ganglia_collector_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'GANGLIA::Ganglia collector [gmond] Process down alert for hbasemaster', 'SERVICE MSG') # Hadoop_Ganglia_Collector_Process_Down test('Hadoop_Ganglia_Collector_Process_Down', 'Critical: Hadoop: ganglia_collector_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'GANGLIA::Ganglia collector [gmond] Process down alert for jobtracker', 'SERVICE MSG') test('Hadoop_Ganglia_Collector_Process_Down:OK', 'OK: Hadoop: ganglia_collector_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'GANGLIA::Ganglia collector [gmond] Process down alert for jobtracker', 'SERVICE MSG') # 
Hadoop_Ganglia_Collector_Process_Down test('Hadoop_Ganglia_Collector_Process_Down', 'Critical: Hadoop: ganglia_collector_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'GANGLIA::Ganglia collector [gmond] Process down alert for namenode', 'SERVICE MSG') test('Hadoop_Ganglia_Collector_Process_Down:OK', 'OK: Hadoop: ganglia_collector_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'GANGLIA::Ganglia collector [gmond] Process down alert for namenode', 'SERVICE MSG') # Hadoop_Ganglia_Collector_Process_Down test('Hadoop_Ganglia_Collector_Process_Down', 'Critical: Hadoop: ganglia_collector_process_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'GANGLIA::Ganglia collector [gmond] Process down alert for slaves', 'SERVICE MSG') test('Hadoop_Ganglia_Collector_Process_Down:OK', 'OK: Hadoop: ganglia_collector_process_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'GANGLIA::Ganglia collector [gmond] Process down alert for slaves', 'SERVICE MSG') # Hadoop_UNKNOWN_MSG test('Hadoop_UNKNOWN_MSG', 'Critical: Hadoop: HADOOP_UNKNOWN_MSG# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'ANY UNKNOWN SERVICE', 'SERVICE MSG') # HBase UI Down test('Hadoop_HBase_UI_Down', 'Critical: Hadoop: hbase_ui_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'HBASEMASTER::HBase Web UI down', 'SERVICE MSG') test('Hadoop_HBase_UI_Down:OK', 'OK: Hadoop: hbase_ui_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'HBASEMASTER::HBase Web UI down', 'SERVICE MSG') # Namenode UI Down test('Hadoop_NameNode_UI_Down', 'Critical: Hadoop: namenode_ui_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'NAMENODE::Namenode Web UI down', 'SERVICE MSG') test('Hadoop_NameNode_UI_Down:OK', 'OK: Hadoop: namenode_ui_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'NAMENODE::Namenode Web UI down', 'SERVICE MSG') # JobHistory UI Down test('Hadoop_JobHistory_UI_Down', 'Critical: Hadoop: jobhistory_ui_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'JOBTRACKER::JobHistory Web UI down', 'SERVICE MSG') test('Hadoop_JobHistory_UI_Down:OK', 'OK: Hadoop: 
jobhistory_ui_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'JOBTRACKER::JobHistory Web UI down', 'SERVICE MSG') # JobTracker UI Down test('Hadoop_JobTracker_UI_Down', 'Critical: Hadoop: jobtracker_ui_down# SERVICE MSG', 'HARD', '1', 'CRITICAL', 'JOBTRACKER::JobTracker Web UI down', 'SERVICE MSG') test('Hadoop_JobTracker_UI_Down:OK', 'OK: Hadoop: jobtracker_ui_down_ok# SERVICE MSG', 'HARD', '1', 'OK', 'JOBTRACKER::JobTracker Web UI down', 'SERVICE MSG') summary()
45.837772
171
0.72891
2,485
18,931
5.307445
0.080885
0.106149
0.074304
0.079612
0.80461
0.671999
0.534157
0.432557
0.348169
0.310713
0
0.012562
0.133749
18,931
412
172
45.949029
0.791695
0.096244
0
0.077206
0
0.066176
0.748945
0.291569
0
0
0
0
0
0
null
null
0.025735
0.007353
null
null
0.025735
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
7f9eb20f5bc91ea77309f8140fd5d90bc4af511e
611
py
Python
exercise/built_in_module_test.py
progzc/PythonDemo
0515fee3511bc132bfddf480014f61ce52080616
[ "Apache-2.0" ]
null
null
null
exercise/built_in_module_test.py
progzc/PythonDemo
0515fee3511bc132bfddf480014f61ce52080616
[ "Apache-2.0" ]
null
null
null
exercise/built_in_module_test.py
progzc/PythonDemo
0515fee3511bc132bfddf480014f61ce52080616
[ "Apache-2.0" ]
null
null
null
# python中的常用的内置模块 import sys import time # 文件输入输出 # import os # import calendar # 网络请求相关 import urllib.request # import json # 正则表达式 # import re # import math # import decimal # import logging print('----------------使用sys模块---------------------') # 获取整数24在python中占用的字节大小 print(sys.getsizeof(24)) print(sys.getsizeof(45)) print(sys.getsizeof(True)) print(sys.getsizeof(False)) print('----------------使用sys模块---------------------') # 输出单位为秒 print(time.time()) print(time.localtime(time.time())) print('----------------使用os模块---------------------') print(urllib.request.urlopen('http://www.baidu.com').read())
19.709677
60
0.608838
66
611
5.636364
0.5
0.086022
0.182796
0
0
0
0
0
0
0
0
0.010753
0.086743
611
30
61
20.366667
0.655914
0.252046
0
0.153846
0
0
0.340858
0.295711
0
0
0
0
0
1
0
true
0
0.230769
0
0.230769
0.769231
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
3
7fb68fdf49ccda83376f335f667e9c17e2e632ef
706
py
Python
pytglib/api/types/message_scheduling_state_send_when_online.py
iTeam-co/pytglib
e5e75e0a85f89b77762209b32a61b0a883c0ae61
[ "MIT" ]
6
2019-10-30T08:57:27.000Z
2021-02-08T14:17:43.000Z
pytglib/api/types/message_scheduling_state_send_when_online.py
iTeam-co/python-telegram
e5e75e0a85f89b77762209b32a61b0a883c0ae61
[ "MIT" ]
1
2021-08-19T05:44:10.000Z
2021-08-19T07:14:56.000Z
pytglib/api/types/message_scheduling_state_send_when_online.py
iTeam-co/python-telegram
e5e75e0a85f89b77762209b32a61b0a883c0ae61
[ "MIT" ]
5
2019-12-04T05:30:39.000Z
2021-05-21T18:23:32.000Z
from ..utils import Object class MessageSchedulingStateSendWhenOnline(Object): """ The message will be sent when the peer will be online. Applicable to private chats only and when the exact online status of the peer is known Attributes: ID (:obj:`str`): ``MessageSchedulingStateSendWhenOnline`` No parameters required. Returns: MessageSchedulingState Raises: :class:`telegram.Error` """ ID = "messageSchedulingStateSendWhenOnline" def __init__(self, **kwargs): pass @staticmethod def read(q: dict, *args) -> "MessageSchedulingStateSendWhenOnline": return MessageSchedulingStateSendWhenOnline()
22.774194
145
0.677054
64
706
7.40625
0.75
0.025316
0
0
0
0
0
0
0
0
0
0
0.246459
706
30
146
23.533333
0.890977
0.446176
0
0
0
0
0.208696
0.208696
0
0
0
0
0
1
0.25
false
0.125
0.125
0.125
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
0
0
0
3
7fb804f29021e56f60c86fed818a89e1258a5e19
133
py
Python
binalyzer_blsp/__init__.py
denisvasilik/binalyzer-blsp
8a329c62b3432c9ecc8c073febdd3975892defac
[ "MIT" ]
null
null
null
binalyzer_blsp/__init__.py
denisvasilik/binalyzer-blsp
8a329c62b3432c9ecc8c073febdd3975892defac
[ "MIT" ]
null
null
null
binalyzer_blsp/__init__.py
denisvasilik/binalyzer-blsp
8a329c62b3432c9ecc8c073febdd3975892defac
[ "MIT" ]
null
null
null
""" binalyzer_blsp ~~~~~~~~~~~~~~ Binalyzer BLSP extension. """ from .blsp import ( BLSPServer, JsonRPCParser, )
13.3
29
0.548872
10
133
7.2
0.7
0.361111
0
0
0
0
0
0
0
0
0
0
0.255639
133
10
30
13.3
0.727273
0.421053
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
f69d1e0ccae8597a11c7f1d728ba18d1950c03ad
93
py
Python
SISTGEDIT/registropersonal/tipoturno/apps.py
Edwineverth/SISGEDIT
7d433d5e045b71ac087e40d95ec7d26f159e6453
[ "Apache-2.0" ]
null
null
null
SISTGEDIT/registropersonal/tipoturno/apps.py
Edwineverth/SISGEDIT
7d433d5e045b71ac087e40d95ec7d26f159e6453
[ "Apache-2.0" ]
null
null
null
SISTGEDIT/registropersonal/tipoturno/apps.py
Edwineverth/SISGEDIT
7d433d5e045b71ac087e40d95ec7d26f159e6453
[ "Apache-2.0" ]
null
null
null
from django.apps import AppConfig class TipoturnoConfig(AppConfig): name = 'tipoturno'
15.5
33
0.763441
10
93
7.1
0.9
0
0
0
0
0
0
0
0
0
0
0
0.16129
93
5
34
18.6
0.910256
0
0
0
0
0
0.096774
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
f69e8ae8265b7e97a387c1704c752b8f2f57e82a
159
py
Python
space_missions/apps.py
eferos93/ODMC_exam_implementation
14fd113b00dc45d2b99b2a83c662d6a70171b5e5
[ "CC-BY-4.0" ]
null
null
null
space_missions/apps.py
eferos93/ODMC_exam_implementation
14fd113b00dc45d2b99b2a83c662d6a70171b5e5
[ "CC-BY-4.0" ]
null
null
null
space_missions/apps.py
eferos93/ODMC_exam_implementation
14fd113b00dc45d2b99b2a83c662d6a70171b5e5
[ "CC-BY-4.0" ]
null
null
null
from django.apps import AppConfig class SpaceMissionsConfig(AppConfig): default_auto_field = 'django.db.models.BigAutoField' name = 'space_missions'
22.714286
56
0.779874
18
159
6.722222
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.138365
159
6
57
26.5
0.883212
0
0
0
0
0
0.27044
0.18239
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3