hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
d3b146cefcbdfbb497115b74257a2891722524b5
1,988
py
Python
promgen/util.py
sundy-li/promgen
e532bde46b542dd66f46e3dd654bc1ad31deeec7
[ "MIT" ]
null
null
null
promgen/util.py
sundy-li/promgen
e532bde46b542dd66f46e3dd654bc1ad31deeec7
[ "MIT" ]
8
2021-04-08T21:59:34.000Z
2022-02-10T10:42:43.000Z
promgen/util.py
Andreich2010/promgen
dae2b720f30b0c002aa50a74c4c4fc8dfbcbb2b7
[ "MIT", "Apache-2.0", "BSD-3-Clause" ]
null
null
null
# Copyright (c) 2017 LINE Corporation # These sources are released under the terms of the MIT license: see LICENSE import requests.sessions from django.db.models import F from promgen.version import __version__ from django.conf import settings # Wrappers around request api to ensure we always attach our user agent # https://github.com/requests/requests/blob/master/requests/api.py def post(url, data=None, json=None, **kwargs): with requests.sessions.Session() as session: session.headers['User-Agent'] = 'promgen/{}'.format(__version__) return session.post(url, data=data, json=json, **kwargs) def get(url, params=None, **kwargs): with requests.sessions.Session() as session: session.headers['User-Agent'] = 'promgen/{}'.format(__version__) return session.get(url, params=params, **kwargs) def delete(url, **kwargs): with requests.sessions.Session() as session: session.headers['User-Agent'] = 'promgen/{}'.format(__version__) return session.delete(url, **kwargs) def setting(key, default=None, domain=None): """ Settings helper based on saltstack's query Allows a simple way to query settings from YAML using the style `path:to:key` to represent path: to: key: value """ rtn = settings.PROMGEN if domain: rtn = rtn[domain] for index in key.split(":"): try: rtn = rtn[index] except KeyError: return default return rtn class HelpFor: # Wrap a model's lower level api so that we can easily # grab help_text for a specific field # help_text = HelpFor(DjangoModel) # help_test.field_name def __init__(self, model): self.model = model def __getattr__(self, name): return self.model._meta.get_field(name).help_text def inc_for_pk(model, pk, **kwargs): # key=F('key') + value model.objects.filter(pk=pk).update(**{key: F(key) + kwargs[key] for key in kwargs})
28.811594
87
0.667505
270
1,988
4.792593
0.437037
0.049459
0.041731
0.060278
0.233385
0.233385
0.233385
0.233385
0.233385
0.233385
0
0.002572
0.217807
1,988
68
88
29.235294
0.829582
0.288229
0
0.181818
0
0
0.044623
0
0
0
0
0
0
1
0.212121
false
0
0.121212
0.030303
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d3c48e47d2fa33e8114041e17aa2a33b9c9c1809
895
py
Python
chapter04/ifelse.py
persevere-in-coding-persist-in-learning/python2
b207d0040232abae63638784b34a950b932bef77
[ "Apache-2.0" ]
3
2020-08-05T01:15:41.000Z
2020-08-05T09:28:36.000Z
chapter04/ifelse.py
persevere-in-coding-persist-in-learning/python2
b207d0040232abae63638784b34a950b932bef77
[ "Apache-2.0" ]
null
null
null
chapter04/ifelse.py
persevere-in-coding-persist-in-learning/python2
b207d0040232abae63638784b34a950b932bef77
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 """ 控制结构if elif else的研究 Version: 0.1 Author: huijz Date: 2020-08-24 """ # 例1:if的基本用法: flag = False name = 'huijz' if name == 'python': # 判断变量是否为 python flag = True # 条件成立时设置标志为真 print 'welcome boss' # 并输出欢迎信息 else: print name # 条件不成立时输出变量名称 # 例2:elif用法 num = 5 if num == 3: # 判断num的值 print 'boss' elif num == 2: print 'user' elif num == 1: print 'worker' elif num < 0: # 值小于零时输出 print 'error' else: print 'road' # 条件均不成立时输出 # 例3:if语句多个条件 num = 9 if 0 <= num <= 10: # 判断值是否在0~10之间 print 'hello' # 输出结果: hello num = 10 if num < 0 or num > 10: # 判断值是否在小于0或大于10 print 'hello' else: print 'unDefine' # 输出结果: unDefine num = 8 # 判断值是否在0~5或者10~15之间 if (0 <= num <= 5) or (10 <= num <= 15): print 'hello' else: print 'unDefine' # 输出结果: unDefine # 例4:var = 100 var = 100 if var == 100: print "变量 var 的值为100" print "Good bye!"
16.886792
41
0.606704
132
895
4.113636
0.492424
0.066298
0.022099
0.069982
0.143646
0.143646
0.143646
0
0
0
0
0.08982
0.253631
895
52
42
17.211538
0.723054
0.248045
0
0.264706
0
0
0.169811
0
0
0
0
0
0
0
null
null
0
0
null
null
0.411765
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
2
d3c7eb72e9d8627f04182ce89238416d18909674
1,436
py
Python
src/core/stats.py
dynaryu/vaws
f6ed9b75408f7ce6100ed59b7754f745e59be152
[ "BSD-3-Clause" ]
null
null
null
src/core/stats.py
dynaryu/vaws
f6ed9b75408f7ce6100ed59b7754f745e59be152
[ "BSD-3-Clause" ]
null
null
null
src/core/stats.py
dynaryu/vaws
f6ed9b75408f7ce6100ed59b7754f745e59be152
[ "BSD-3-Clause" ]
null
null
null
import math def lognormal_mean(m, stddev): """ compute mean of log x with mean and std. of x Args: m: mean of x stddev: standard deviation of x Returns: mean of log x """ return math.log(m) - (0.5 * math.log(1.0 + (stddev * stddev) / (m * m))) def lognormal_stddev(m, stddev): """ compute std. of log x with mean and std. of x Args: m: mean of x stddev: standard deviation of x Returns: std. of log x """ return math.sqrt(math.log((stddev * stddev) / (m * m) + 1)) def lognormal_underlying_mean(m, stddev): """ compute mean of x with mean and std of log x Args: m: mean of log x stddev: std of log x Returns: """ # if m == 0 or stddev == 0: # print '{}'.format('why ???') # return 0 return math.exp(m + 0.5 * stddev * stddev) def lognormal_underlying_stddev(m, stddev): """ compute std of x with mean and std of log x Args: m: mean of log x stddev: std of log x Returns: std of x """ # if m == 0 or stddev == 0: # print '{}'.format('strange why???') # return 0 return math.sqrt((math.exp(stddev**2.0) - 1.0) * math.exp(2.0*m + stddev**2.0)) #return lognormal_underlying_mean(m, stddev) * \ # math.sqrt((math.exp(stddev * stddev) - 1.0))
23.16129
77
0.521588
212
1,436
3.495283
0.150943
0.067476
0.080972
0.072874
0.746289
0.531714
0.410256
0.410256
0.345479
0.345479
0
0.024705
0.351671
1,436
61
78
23.540984
0.771214
0.509749
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0.1
0
0.9
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d3d87b798d29e52210031306b4e2f4fee10a8cd2
992
py
Python
stacker/tests/providers/aws/test_interactive.py
GoodRx/stacker
0cf1df67b4ae5aeda5845442c84905909101c238
[ "BSD-2-Clause" ]
1
2021-11-06T17:01:01.000Z
2021-11-06T17:01:01.000Z
stacker/tests/providers/aws/test_interactive.py
GoodRx/stacker
0cf1df67b4ae5aeda5845442c84905909101c238
[ "BSD-2-Clause" ]
null
null
null
stacker/tests/providers/aws/test_interactive.py
GoodRx/stacker
0cf1df67b4ae5aeda5845442c84905909101c238
[ "BSD-2-Clause" ]
1
2021-11-06T17:00:53.000Z
2021-11-06T17:00:53.000Z
import unittest from ....providers.aws.interactive import requires_replacement def generate_resource_change(replacement=True): resource_change = { "Action": "Modify", "Details": [], "LogicalResourceId": "Fake", "PhysicalResourceId": "arn:aws:fake", "Replacement": "True" if replacement else "False", "ResourceType": "AWS::Fake", "Scope": ["Properties"], } return { "ResourceChange": resource_change, "Type": "Resource", } class TestInteractiveProvider(unittest.TestCase): def test_requires_replacement(self): changeset = [ generate_resource_change(), generate_resource_change(replacement=False), generate_resource_change(), ] replacement = requires_replacement(changeset) self.assertEqual(len(replacement), 2) for resource in replacement: self.assertEqual(resource["ResourceChange"]["Replacement"], "True")
30.060606
79
0.634073
83
992
7.409639
0.481928
0.136585
0.143089
0.160976
0
0
0
0
0
0
0
0.001337
0.245968
992
32
80
31
0.820856
0
0
0.074074
1
0
0.18246
0
0
0
0
0
0.074074
1
0.074074
false
0
0.074074
0
0.222222
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d3db48db20a20bc47e28f0062af79ebd64f3fa41
811
py
Python
forms/views.py
urchinpro/L2-forms
37f33386984efbb2d1e92c73d915256247801109
[ "MIT" ]
null
null
null
forms/views.py
urchinpro/L2-forms
37f33386984efbb2d1e92c73d915256247801109
[ "MIT" ]
null
null
null
forms/views.py
urchinpro/L2-forms
37f33386984efbb2d1e92c73d915256247801109
[ "MIT" ]
null
null
null
from django.http import HttpResponse from django.utils.module_loading import import_string def pdf(request): """ Get form's number (decimal type: 101.15 - where "101" is form's group and "15"-number itsels). Can't use 1,2,3,4,5,6,7,8,9 for number itsels - which stands after the point. Bacause in database field store in decimal format xxx.yy - two number after dot, and active status. Must use: 01,02,03-09,10,11,12-19,20,21,22-29,30,31..... :param request: :return: """ response = HttpResponse(content_type='application/pdf') t = request.GET.get("type") response['Content-Disposition'] = 'inline; filename="form-' + t + '.pdf"' f = import_string('forms.forms' + t[0:3] + '.form_' + t[4:6]) response.write(f(request_data=request.GET)) return response
38.619048
103
0.670777
130
811
4.138462
0.638462
0.055762
0
0
0
0
0
0
0
0
0
0.076807
0.181258
811
20
104
40.55
0.733434
0.436498
0
0
0
0
0.19715
0
0
0
0
0
0
1
0.111111
false
0
0.333333
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
d3de757442c04a58c632f23911d3bb3230eadbab
572
py
Python
parkrundata/views.py
remarkablerocket/parkrundata
c717b59771629d6308ec093e29fd373981726fde
[ "BSD-3-Clause" ]
null
null
null
parkrundata/views.py
remarkablerocket/parkrundata
c717b59771629d6308ec093e29fd373981726fde
[ "BSD-3-Clause" ]
null
null
null
parkrundata/views.py
remarkablerocket/parkrundata
c717b59771629d6308ec093e29fd373981726fde
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- from rest_framework import viewsets from rest_framework.permissions import IsAuthenticatedOrReadOnly from .models import Country, Event from .serializers import CountrySerializer, EventSerializer class CountryViewSet(viewsets.ModelViewSet): queryset = Country.objects.all() serializer_class = CountrySerializer permission_classes = [IsAuthenticatedOrReadOnly] class EventViewSet(viewsets.ModelViewSet): queryset = Event.objects.all() serializer_class = EventSerializer permission_classes = [IsAuthenticatedOrReadOnly]
28.6
64
0.798951
52
572
8.673077
0.5
0.035477
0.075388
0.110865
0
0
0
0
0
0
0
0.002012
0.131119
572
19
65
30.105263
0.905433
0.036713
0
0.166667
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
d3e19347ed0ddda8633be363dd6cfd4b345245b2
402
py
Python
catalog/bindings/gmd/point.py
NIVANorge/s-enda-playground
56ae0a8978f0ba8a5546330786c882c31e17757a
[ "Apache-2.0" ]
null
null
null
catalog/bindings/gmd/point.py
NIVANorge/s-enda-playground
56ae0a8978f0ba8a5546330786c882c31e17757a
[ "Apache-2.0" ]
null
null
null
catalog/bindings/gmd/point.py
NIVANorge/s-enda-playground
56ae0a8978f0ba8a5546330786c882c31e17757a
[ "Apache-2.0" ]
null
null
null
from dataclasses import dataclass from bindings.gmd.point_type import PointType __NAMESPACE__ = "http://www.opengis.net/gml" @dataclass class Point(PointType): """A Point is defined by a single coordinate tuple. The direct position of a point is specified by the pos element which is of type DirectPositionType. """ class Meta: namespace = "http://www.opengis.net/gml"
23.647059
72
0.721393
55
402
5.181818
0.6
0.091228
0.112281
0.161404
0.203509
0.203509
0
0
0
0
0
0
0.196517
402
16
73
25.125
0.882353
0.370647
0
0
0
0
0.222222
0
0
0
0
0
0
1
0
false
0
0.285714
0
0.571429
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
d3ea46bda3dee2d1a7eb7b7fac100d0a90820e25
14,363
py
Python
sdk/python/pulumi_aws_native/amplify/_inputs.py
AaronFriel/pulumi-aws-native
5621690373ac44accdbd20b11bae3be1baf022d1
[ "Apache-2.0" ]
29
2021-09-30T19:32:07.000Z
2022-03-22T21:06:08.000Z
sdk/python/pulumi_aws_native/amplify/_inputs.py
AaronFriel/pulumi-aws-native
5621690373ac44accdbd20b11bae3be1baf022d1
[ "Apache-2.0" ]
232
2021-09-30T19:26:26.000Z
2022-03-31T23:22:06.000Z
sdk/python/pulumi_aws_native/amplify/_inputs.py
AaronFriel/pulumi-aws-native
5621690373ac44accdbd20b11bae3be1baf022d1
[ "Apache-2.0" ]
4
2021-11-10T19:42:01.000Z
2022-02-05T10:15:49.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from ._enums import * __all__ = [ 'AppAutoBranchCreationConfigArgs', 'AppBasicAuthConfigArgs', 'AppCustomRuleArgs', 'AppEnvironmentVariableArgs', 'AppTagArgs', 'BranchBasicAuthConfigArgs', 'BranchEnvironmentVariableArgs', 'BranchTagArgs', 'DomainSubDomainSettingArgs', ] @pulumi.input_type class AppAutoBranchCreationConfigArgs: def __init__(__self__, *, auto_branch_creation_patterns: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, basic_auth_config: Optional[pulumi.Input['AppBasicAuthConfigArgs']] = None, build_spec: Optional[pulumi.Input[str]] = None, enable_auto_branch_creation: Optional[pulumi.Input[bool]] = None, enable_auto_build: Optional[pulumi.Input[bool]] = None, enable_performance_mode: Optional[pulumi.Input[bool]] = None, enable_pull_request_preview: Optional[pulumi.Input[bool]] = None, environment_variables: Optional[pulumi.Input[Sequence[pulumi.Input['AppEnvironmentVariableArgs']]]] = None, pull_request_environment_name: Optional[pulumi.Input[str]] = None, stage: Optional[pulumi.Input['AppAutoBranchCreationConfigStage']] = None): if auto_branch_creation_patterns is not None: pulumi.set(__self__, "auto_branch_creation_patterns", auto_branch_creation_patterns) if basic_auth_config is not None: pulumi.set(__self__, "basic_auth_config", basic_auth_config) if build_spec is not None: pulumi.set(__self__, "build_spec", build_spec) if enable_auto_branch_creation is not None: pulumi.set(__self__, "enable_auto_branch_creation", enable_auto_branch_creation) if enable_auto_build is not None: pulumi.set(__self__, "enable_auto_build", enable_auto_build) if enable_performance_mode is not None: pulumi.set(__self__, "enable_performance_mode", 
enable_performance_mode) if enable_pull_request_preview is not None: pulumi.set(__self__, "enable_pull_request_preview", enable_pull_request_preview) if environment_variables is not None: pulumi.set(__self__, "environment_variables", environment_variables) if pull_request_environment_name is not None: pulumi.set(__self__, "pull_request_environment_name", pull_request_environment_name) if stage is not None: pulumi.set(__self__, "stage", stage) @property @pulumi.getter(name="autoBranchCreationPatterns") def auto_branch_creation_patterns(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: return pulumi.get(self, "auto_branch_creation_patterns") @auto_branch_creation_patterns.setter def auto_branch_creation_patterns(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "auto_branch_creation_patterns", value) @property @pulumi.getter(name="basicAuthConfig") def basic_auth_config(self) -> Optional[pulumi.Input['AppBasicAuthConfigArgs']]: return pulumi.get(self, "basic_auth_config") @basic_auth_config.setter def basic_auth_config(self, value: Optional[pulumi.Input['AppBasicAuthConfigArgs']]): pulumi.set(self, "basic_auth_config", value) @property @pulumi.getter(name="buildSpec") def build_spec(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "build_spec") @build_spec.setter def build_spec(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "build_spec", value) @property @pulumi.getter(name="enableAutoBranchCreation") def enable_auto_branch_creation(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "enable_auto_branch_creation") @enable_auto_branch_creation.setter def enable_auto_branch_creation(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enable_auto_branch_creation", value) @property @pulumi.getter(name="enableAutoBuild") def enable_auto_build(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "enable_auto_build") @enable_auto_build.setter def enable_auto_build(self, 
value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enable_auto_build", value) @property @pulumi.getter(name="enablePerformanceMode") def enable_performance_mode(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "enable_performance_mode") @enable_performance_mode.setter def enable_performance_mode(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enable_performance_mode", value) @property @pulumi.getter(name="enablePullRequestPreview") def enable_pull_request_preview(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "enable_pull_request_preview") @enable_pull_request_preview.setter def enable_pull_request_preview(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enable_pull_request_preview", value) @property @pulumi.getter(name="environmentVariables") def environment_variables(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AppEnvironmentVariableArgs']]]]: return pulumi.get(self, "environment_variables") @environment_variables.setter def environment_variables(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AppEnvironmentVariableArgs']]]]): pulumi.set(self, "environment_variables", value) @property @pulumi.getter(name="pullRequestEnvironmentName") def pull_request_environment_name(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "pull_request_environment_name") @pull_request_environment_name.setter def pull_request_environment_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "pull_request_environment_name", value) @property @pulumi.getter def stage(self) -> Optional[pulumi.Input['AppAutoBranchCreationConfigStage']]: return pulumi.get(self, "stage") @stage.setter def stage(self, value: Optional[pulumi.Input['AppAutoBranchCreationConfigStage']]): pulumi.set(self, "stage", value) @pulumi.input_type class AppBasicAuthConfigArgs: def __init__(__self__, *, enable_basic_auth: Optional[pulumi.Input[bool]] = None, password: Optional[pulumi.Input[str]] = None, username: 
Optional[pulumi.Input[str]] = None): if enable_basic_auth is not None: pulumi.set(__self__, "enable_basic_auth", enable_basic_auth) if password is not None: pulumi.set(__self__, "password", password) if username is not None: pulumi.set(__self__, "username", username) @property @pulumi.getter(name="enableBasicAuth") def enable_basic_auth(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "enable_basic_auth") @enable_basic_auth.setter def enable_basic_auth(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enable_basic_auth", value) @property @pulumi.getter def password(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "password") @password.setter def password(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "password", value) @property @pulumi.getter def username(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "username") @username.setter def username(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "username", value) @pulumi.input_type class AppCustomRuleArgs: def __init__(__self__, *, source: pulumi.Input[str], target: pulumi.Input[str], condition: Optional[pulumi.Input[str]] = None, status: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "source", source) pulumi.set(__self__, "target", target) if condition is not None: pulumi.set(__self__, "condition", condition) if status is not None: pulumi.set(__self__, "status", status) @property @pulumi.getter def source(self) -> pulumi.Input[str]: return pulumi.get(self, "source") @source.setter def source(self, value: pulumi.Input[str]): pulumi.set(self, "source", value) @property @pulumi.getter def target(self) -> pulumi.Input[str]: return pulumi.get(self, "target") @target.setter def target(self, value: pulumi.Input[str]): pulumi.set(self, "target", value) @property @pulumi.getter def condition(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "condition") @condition.setter def condition(self, value: Optional[pulumi.Input[str]]): 
pulumi.set(self, "condition", value) @property @pulumi.getter def status(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "status") @status.setter def status(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "status", value) @pulumi.input_type class AppEnvironmentVariableArgs: def __init__(__self__, *, name: pulumi.Input[str], value: pulumi.Input[str]): pulumi.set(__self__, "name", name) pulumi.set(__self__, "value", value) @property @pulumi.getter def name(self) -> pulumi.Input[str]: return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter def value(self) -> pulumi.Input[str]: return pulumi.get(self, "value") @value.setter def value(self, value: pulumi.Input[str]): pulumi.set(self, "value", value) @pulumi.input_type class AppTagArgs: def __init__(__self__, *, key: pulumi.Input[str], value: pulumi.Input[str]): pulumi.set(__self__, "key", key) pulumi.set(__self__, "value", value) @property @pulumi.getter def key(self) -> pulumi.Input[str]: return pulumi.get(self, "key") @key.setter def key(self, value: pulumi.Input[str]): pulumi.set(self, "key", value) @property @pulumi.getter def value(self) -> pulumi.Input[str]: return pulumi.get(self, "value") @value.setter def value(self, value: pulumi.Input[str]): pulumi.set(self, "value", value) @pulumi.input_type class BranchBasicAuthConfigArgs: def __init__(__self__, *, password: pulumi.Input[str], username: pulumi.Input[str], enable_basic_auth: Optional[pulumi.Input[bool]] = None): pulumi.set(__self__, "password", password) pulumi.set(__self__, "username", username) if enable_basic_auth is not None: pulumi.set(__self__, "enable_basic_auth", enable_basic_auth) @property @pulumi.getter def password(self) -> pulumi.Input[str]: return pulumi.get(self, "password") @password.setter def password(self, value: pulumi.Input[str]): pulumi.set(self, "password", value) @property @pulumi.getter def username(self) -> 
pulumi.Input[str]: return pulumi.get(self, "username") @username.setter def username(self, value: pulumi.Input[str]): pulumi.set(self, "username", value) @property @pulumi.getter(name="enableBasicAuth") def enable_basic_auth(self) -> Optional[pulumi.Input[bool]]: return pulumi.get(self, "enable_basic_auth") @enable_basic_auth.setter def enable_basic_auth(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enable_basic_auth", value) @pulumi.input_type class BranchEnvironmentVariableArgs: def __init__(__self__, *, name: pulumi.Input[str], value: pulumi.Input[str]): pulumi.set(__self__, "name", name) pulumi.set(__self__, "value", value) @property @pulumi.getter def name(self) -> pulumi.Input[str]: return pulumi.get(self, "name") @name.setter def name(self, value: pulumi.Input[str]): pulumi.set(self, "name", value) @property @pulumi.getter def value(self) -> pulumi.Input[str]: return pulumi.get(self, "value") @value.setter def value(self, value: pulumi.Input[str]): pulumi.set(self, "value", value) @pulumi.input_type class BranchTagArgs: def __init__(__self__, *, key: pulumi.Input[str], value: pulumi.Input[str]): pulumi.set(__self__, "key", key) pulumi.set(__self__, "value", value) @property @pulumi.getter def key(self) -> pulumi.Input[str]: return pulumi.get(self, "key") @key.setter def key(self, value: pulumi.Input[str]): pulumi.set(self, "key", value) @property @pulumi.getter def value(self) -> pulumi.Input[str]: return pulumi.get(self, "value") @value.setter def value(self, value: pulumi.Input[str]): pulumi.set(self, "value", value) @pulumi.input_type class DomainSubDomainSettingArgs: def __init__(__self__, *, branch_name: pulumi.Input[str], prefix: pulumi.Input[str]): pulumi.set(__self__, "branch_name", branch_name) pulumi.set(__self__, "prefix", prefix) @property @pulumi.getter(name="branchName") def branch_name(self) -> pulumi.Input[str]: return pulumi.get(self, "branch_name") @branch_name.setter def branch_name(self, value: pulumi.Input[str]): 
pulumi.set(self, "branch_name", value) @property @pulumi.getter def prefix(self) -> pulumi.Input[str]: return pulumi.get(self, "prefix") @prefix.setter def prefix(self, value: pulumi.Input[str]): pulumi.set(self, "prefix", value)
34.609639
124
0.664555
1,652
14,363
5.516949
0.063559
0.126728
0.096774
0.062541
0.742813
0.629142
0.549704
0.483542
0.423085
0.323897
0
0.000089
0.213674
14,363
414
125
34.693237
0.806889
0.011209
0
0.466667
1
0
0.120175
0.069597
0
0
0
0
0
1
0.209091
false
0.045455
0.018182
0.090909
0.345455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
d3eaa974be46c94752b5084755d30c91ec1e2ca1
4,203
py
Python
awsecommerceservice/models/item_lookup_request.py
nidaizamir/Test-PY
26ea1019115a1de3b1b37a4b830525e164ac55ce
[ "MIT" ]
null
null
null
awsecommerceservice/models/item_lookup_request.py
nidaizamir/Test-PY
26ea1019115a1de3b1b37a4b830525e164ac55ce
[ "MIT" ]
null
null
null
awsecommerceservice/models/item_lookup_request.py
nidaizamir/Test-PY
26ea1019115a1de3b1b37a4b830525e164ac55ce
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ awsecommerceservice This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ). """ class ItemLookupRequest(object): """Implementation of the 'ItemLookupRequest' model. TODO: type model description here. Attributes: condition (ConditionEnum): TODO: type description here. id_type (IdTypeEnum): TODO: type description here. merchant_id (string): TODO: type description here. item_id (list of string): TODO: type description here. response_group (list of string): TODO: type description here. search_index (string): TODO: type description here. variation_page (object): TODO: type description here. related_item_page (object): TODO: type description here. relationship_type (list of string): TODO: type description here. include_reviews_summary (string): TODO: type description here. truncate_reviews_at (int): TODO: type description here. """ # Create a mapping from Model property names to API property names _names = { "condition":'Condition', "id_type":'IdType', "merchant_id":'MerchantId', "item_id":'ItemId', "response_group":'ResponseGroup', "search_index":'SearchIndex', "variation_page":'VariationPage', "related_item_page":'RelatedItemPage', "relationship_type":'RelationshipType', "include_reviews_summary":'IncludeReviewsSummary', "truncate_reviews_at":'TruncateReviewsAt' } def __init__(self, condition=None, id_type=None, merchant_id=None, item_id=None, response_group=None, search_index=None, variation_page=None, related_item_page=None, relationship_type=None, include_reviews_summary=None, truncate_reviews_at=None): """Constructor for the ItemLookupRequest class""" # Initialize members of the class self.condition = condition self.id_type = id_type self.merchant_id = merchant_id self.item_id = item_id self.response_group = response_group self.search_index = search_index self.variation_page = variation_page self.related_item_page = related_item_page self.relationship_type = relationship_type self.include_reviews_summary = 
include_reviews_summary self.truncate_reviews_at = truncate_reviews_at @classmethod def from_dictionary(cls, dictionary): """Creates an instance of this model from a dictionary Args: dictionary (dictionary): A dictionary representation of the object as obtained from the deserialization of the server's response. The keys MUST match property names in the API description. Returns: object: An instance of this structure class. """ if dictionary is None: return None # Extract variables from the dictionary condition = dictionary.get('Condition') id_type = dictionary.get('IdType') merchant_id = dictionary.get('MerchantId') item_id = dictionary.get('ItemId') response_group = dictionary.get('ResponseGroup') search_index = dictionary.get('SearchIndex') variation_page = dictionary.get('VariationPage') related_item_page = dictionary.get('RelatedItemPage') relationship_type = dictionary.get('RelationshipType') include_reviews_summary = dictionary.get('IncludeReviewsSummary') truncate_reviews_at = dictionary.get('TruncateReviewsAt') # Return an object of this model return cls(condition, id_type, merchant_id, item_id, response_group, search_index, variation_page, related_item_page, relationship_type, include_reviews_summary, truncate_reviews_at)
35.618644
83
0.623602
421
4,203
5.995249
0.23753
0.038035
0.082805
0.100238
0.102219
0.06775
0.041601
0
0
0
0
0.001021
0.300738
4,203
117
84
35.923077
0.857775
0.336902
0
0
1
0
0.160972
0.024677
0
0
0
0.102564
0
1
0.03125
false
0
0
0
0.09375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
2
310702fdf551ae6fbf1e07ce937cc37a28affac0
233
py
Python
libra/handlers/user.py
pitomba/libra
8a2e4008f5a6038995ed03ea29d1cbf3dc3c589d
[ "MIT" ]
2
2015-03-22T09:44:36.000Z
2015-06-04T06:30:13.000Z
libra/handlers/user.py
pitomba/libra
8a2e4008f5a6038995ed03ea29d1cbf3dc3c589d
[ "MIT" ]
null
null
null
libra/handlers/user.py
pitomba/libra
8a2e4008f5a6038995ed03ea29d1cbf3dc3c589d
[ "MIT" ]
null
null
null
# coding: utf-8 from tornado.web import RequestHandler from libra.handlers.base import authenticated class UserHandler(RequestHandler): @authenticated def post(self, user, **kwargs): self.write({"msg": "Success"})
21.181818
45
0.716738
27
233
6.185185
0.814815
0
0
0
0
0
0
0
0
0
0
0.005155
0.167382
233
10
46
23.3
0.85567
0.055794
0
0
0
0
0.045872
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
310bacdf46c233952b779b59db8e0cb5aa5c4604
5,068
py
Python
planning/domains/depth/depthGenerator.py
xlbandy/fape
8a00f9d4c20f722930c11d88b60e0e82f523a439
[ "BSD-2-Clause" ]
14
2017-01-09T23:25:12.000Z
2022-02-16T12:08:48.000Z
planning/domains/depth/depthGenerator.py
xlbandy/fape
8a00f9d4c20f722930c11d88b60e0e82f523a439
[ "BSD-2-Clause" ]
7
2018-05-18T08:27:03.000Z
2022-03-23T06:39:42.000Z
planning/domains/depth/depthGenerator.py
xlbandy/fape
8a00f9d4c20f722930c11d88b60e0e82f523a439
[ "BSD-2-Clause" ]
8
2016-12-09T13:31:43.000Z
2022-02-16T12:08:50.000Z
from __future__ import division import itertools import json import math import os import random import shutil import subprocess import sys durationA = str(5) durationB = str(4) durationC = str(1) def main(): if len(sys.argv) > 1: nbDepth = int(sys.argv[1]) if nbDepth < 2 : nbDepth =2 else : nbDepth =2 mainFolder = "depth" if not os.path.exists(mainFolder): subprocess.call(["mkdir", mainFolder]) generateDomain("depth", nbDepth) #print "Every file has been written. Exiting" def generateDomain(folderName, nbDepth): domainFilename = folderName + "/" + folderName + "-flat" + str(nbDepth) + ".dom.anml" printDomainToFile(domainFilename, nbDepth) domainFilename = folderName + "/" + folderName + "-hier" + str(nbDepth) + ".dom.anml" printDomainHierToFile(domainFilename, nbDepth) def printDomainToFile(domainFilename, nbDepth): with open(domainFilename, "w") as f: for i in range(0, nbDepth): f.write("predicate a" + str(i+1) +"();\n") f.write("predicate b" + str(i+1) +"();\n") f.write("predicate c" + str(i+1) +"();\n") f.write("predicate d" + str(i+1) +"();\n") f.write("predicate e" + str(i+1) +"();\n") f.write("\naction An" + str(i+1) + " () {\n") f.write("\tduration := " + durationA + ";\n") if i > 0: f.write("\t[start] {\n") f.write("\t\tb"+ str(i) +" == true;\n") f.write("\t\td"+ str(i) +" == true;\n") f.write("\t\te"+ str(i) +" == true;\n") f.write("\t};\n") f.write("\t[start] a" + str(i+1) + " := true;\n") f.write("\t[end] {\n") f.write("\t\ta" + str(i+1) + " := false;\n") f.write("\t\tb" + str(i+1) + " := true;\n") f.write("\t\td" + str(i+1) + " := false;\n") f.write("\t};\n") f.write("};\n") f.write("\naction Bn" + str(i+1) + " () {\n") f.write("\tduration := " + durationB + ";\n") f.write("\t[start] a" + str(i+1) + " == true;\n") f.write("\t[start] c" + str(i+1) + " := true;\n") f.write("\t[end] {\n") f.write("\t\tc" + str(i+1) + " := false;\n") f.write("\t\td" + str(i+1) + " := true;\n") f.write("\t};\n") f.write("};\n") f.write("\naction Cn" + str(i+1) + " () {\n") 
f.write("\tduration := " + durationC + ";\n") f.write("\t[start] c" + str(i+1) + " == true;\n") f.write("\t[end] {\n") f.write("\t\tb" + str(i+1) + " := false;\n") f.write("\t\te" + str(i+1) + " := true;\n") f.write("\t};\n") f.write("};\n") ######################## problem ############### f.write("\n/*******Problem************/\n") f.write("[all] contains{\n") f.write("\tCn" + str(nbDepth) +"();\n") f.write("};") def printDomainHierToFile(domainFilename, nbDepth): with open(domainFilename, "w") as f: for i in range(0, nbDepth): if i == 0: f.write("\naction An" + str(i+1) + " () {\n") f.write("\tmotivated;\n") f.write("\tduration := " + durationA + ";\n") f.write("};\n") else: f.write("\naction An" + str(i+1) + " () {\n") f.write("\tmotivated;\n") f.write("\tduration := " + durationA + ";\n") f.write("\ta : ABC" + str(i) + "();\n") f.write("\t end(a) < start;\n") f.write("};\n") f.write("\naction Bn" + str(i+1) + " () {\n") f.write("\tduration := " + durationB + ";\n") f.write("\tmotivated;\n") f.write("};\n") f.write("\naction Cn" + str(i+1) + " () {\n") f.write("\tduration := " + durationC + ";\n") f.write("\tmotivated;\n") f.write("};\n") f.write("\naction ABC" + str(i+1) + " () {\n") f.write("\t[all] contains {\n") f.write("\t\t b" + str(i+1) + " : An" + str(i+1) + "();\n") f.write("\t\t d" + str(i+1) + " : Bn" + str(i+1) + "();\n") f.write("\t\t e" + str(i+1) + " : Cn" + str(i+1) + "();\n") f.write("\t};\n") f.write("\tstart(b" + str(i+1) + ") < start(d" + str(i+1) + ");\n") f.write("\tend(d" + str(i+1) + ") < end(b" + str(i+1) + ");\n") f.write("\tstart(d" + str(i+1) + ") < start(e" + str(i+1) + ");\n") f.write("\tend(e" + str(i+1) + ") < end(d" + str(i+1) + ");\n") f.write("};\n") #################### problem ############# f.write("\n/*******Problem************/\n") f.write("[all] contains{\n") f.write("\tCn" + str(nbDepth) +"();\n") f.write("};") if __name__ == "__main__": main()
36.992701
89
0.414759
635
5,068
3.291339
0.146457
0.209569
0.224402
0.103349
0.637799
0.616268
0.611483
0.502392
0.432057
0.432057
0
0.01468
0.32794
5,068
136
90
37.264706
0.598943
0.017758
0
0.446429
0
0
0.229732
0.013069
0
0
0
0
0
1
0.035714
false
0
0.080357
0
0.116071
0.035714
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
3113f40d512a333a3eafda75e8d9b764160cd806
15,467
py
Python
(19.12.06) Culminating/sprites.py
bly852/ICS3U1
8844321b26027d1612fad7fba88e70a1108de2eb
[ "MIT" ]
null
null
null
(19.12.06) Culminating/sprites.py
bly852/ICS3U1
8844321b26027d1612fad7fba88e70a1108de2eb
[ "MIT" ]
null
null
null
(19.12.06) Culminating/sprites.py
bly852/ICS3U1
8844321b26027d1612fad7fba88e70a1108de2eb
[ "MIT" ]
null
null
null
# course: ICS3U1 2019 # exercise: Culminating Activity # date: 2019-12-06 # student number: 340926187 # name: Brandon Ly # description: Two players (Mr Chun & Mr Pileggi) running around the school # collecting food for the food drive. # sprite classes import pygame import random import math import os from settings import * class Player(pygame.sprite.Sprite): """ player class that contains all data and functions related to the player """ def __init__(self, game, x, y, playerNum): """ initalizes a player sprite when an instance is created in the game parameter, at the x and y paramters, and with the player number """ self.playerNum = playerNum self.groups = game.all_sprites, game.players pygame.sprite.Sprite.__init__(self, self.groups) self.game = game # image selection for each player if self.playerNum == 1: self.image = pygame.transform.rotate(self.game.player1_image, 90) else: self.image = pygame.transform.rotate(self.game.player2_image, 90) self.rect = self.image.get_rect() # setting the players base movement velocity self.velX, self.velY = 0, 0 # setting the players position on the grid self.x = x * tileSize - tileSize self.y = y * tileSize - tileSize # players starting score self.score = 0 # if joysticks are connected, enable joystick controls for the player self.joystick_count = pygame.joystick.get_count() if self.joystick_count > 0: self.joystick_enabled = True else: self.joystick_enabled = False def get_keys(self): """ checks for all keys pressed and changes the players velocity on that axis to the player speed varaiable """ self.velX, self.velY = 0, 0 keys = pygame.key.get_pressed() # player 1 controls if self.playerNum == 1: if keys[pygame.K_a]: self.velX = -player_speed if keys[pygame.K_d]: self.velX = player_speed if keys[pygame.K_w]: self.velY = -player_speed if keys[pygame.K_s]: self.velY = player_speed # player 2 controls else: if keys[pygame.K_LEFT]: self.velX = -player_speed if keys[pygame.K_RIGHT]: self.velX = player_speed if keys[pygame.K_UP]: 
self.velY = -player_speed if keys[pygame.K_DOWN]: self.velY = player_speed # if moving diagonally reduce the speed if self.velX > 0 and self.velY > 0: self.velX = player_speed * 0.701 self.velY = player_speed * 0.701 elif self.velX < 0 and self.velY < 0: self.velX = player_speed * -0.701 self.velY = player_speed * -0.701 def get_joystick_axis(self): """ changes the velocity of the character in the x and y based on joystick input """ # joystick controls for two seperate controllers if self.joystick_count == 2: # joystick control for player 1 if self.playerNum == 1: # joystick initialization joystick = pygame.joystick.Joystick(1) joystick.init() # different joystick settings for Xbox controllers if joystick.get_name() == 'Xbox Wireless Controller' or 'Controller (Xbox One For Windows)': # checks for axis movement and changes velX and velY if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0: self.velX += joystick.get_axis(0) * player_speed self.velY += joystick.get_axis(1) * player_speed else: if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0: self.velX += joystick.get_axis(1) * player_speed self.velY -= joystick.get_axis(0) * player_speed # joystick control for player 2 elif self.playerNum == 2: # joystick initialization joystick = pygame.joystick.Joystick(0) joystick.init() # Different joystick settings for Xbox controllers if joystick.get_name() == 'Xbox Wireless Controller' or 'Controller (Xbox One For Windows)': # checks for axis movement and changes velX and velY if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0: self.velX += joystick.get_axis(0) * player_speed self.velY += joystick.get_axis(1) * player_speed else: if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0: self.velX += joystick.get_axis(1) * player_speed self.velY -= joystick.get_axis(0) * player_speed # joystick controls for a single controller elif self.joystick_count == 1: # joystick control for player 1 if 
self.playerNum == 1: # joystick initialization joystick = pygame.joystick.Joystick(0) joystick.init() # different joystick settings for Xbox controllers if joystick.get_name() == 'Xbox Wireless Controller' or 'Controller (Xbox One For Windows)': # checks for axis movement and changes velX and velY if round(joystick.get_axis(0)) != 0 or round(joystick.get_axis(1)) != 0: self.velX += joystick.get_axis(0) * player_speed self.velY += joystick.get_axis(1) * player_speed else: if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0: self.velX += joystick.get_axis(1) * player_speed self.velY -= joystick.get_axis(0) * player_speed # joystick control for player 2 elif self.playerNum == 2: # joystick initialization joystick = pygame.joystick.Joystick(0) joystick.init() # different joystick settings for Xbox controllers if joystick.get_name() == 'Xbox Wireless Controller' or 'Controller (Xbox One For Windows)': # checks for axis movement and changes velX and velY if round(joystick.get_axis(4)) != 0 or round(joystick.get_axis(3)) != 0: self.velX += joystick.get_axis(4) * player_speed self.velY += joystick.get_axis(3) * player_speed else: if round(joystick.get_axis(1)) != 0 or round(joystick.get_axis(0)) != 0: self.velX += joystick.get_axis(2) * player_speed self.velY -= joystick.get_axis(3) * player_speed def direction(self): """ rotates the player sprite based on the current direction and new direction """ # player 1 rotation if self.playerNum == 1: if self.velX > 100: if self.velY < -100: self.image = pygame.transform.rotate(self.game.player1_image, 45) elif self.velY > 100: self.image = pygame.transform.rotate(self.game.player1_image, -45) else: self.image = pygame.transform.rotate(self.game.player1_image, 0) elif self.velX < -100: if self.velY < -100: self.image = pygame.transform.rotate(self.game.player1_image, 135) elif self.velY > 100: self.image = pygame.transform.rotate(self.game.player1_image, -135) else: self.image = 
pygame.transform.rotate(self.game.player1_image, 180) else: if self.velY < -100: self.image = pygame.transform.rotate(self.game.player1_image, 90) elif self.velY > 100: self.image = pygame.transform.rotate(self.game.player1_image, -90) # player 2 rotation else: if self.velX > 100: if self.velY < -100: self.image = pygame.transform.rotate(self.game.player2_image, 45) elif self.velY > 100: self.image = pygame.transform.rotate(self.game.player2_image, -45) else: self.image = pygame.transform.rotate(self.game.player2_image, 0) elif self.velX < -100: if self.velY < -100: self.image = pygame.transform.rotate(self.game.player2_image, 135) elif self.velY > 100: self.image = pygame.transform.rotate(self.game.player2_image, -135) else: self.image = pygame.transform.rotate(self.game.player2_image, 180) else: if self.velY < -100: self.image = pygame.transform.rotate(self.game.player2_image, 90) elif self.velY > 100: self.image = pygame.transform.rotate(self.game.player2_image, -90) def wall_collision(self, axis): """ checks for player collision with the all wall sprites on the axis given and prevents player movement onto it """ if axis == 'x': collides = pygame.sprite.spritecollide(self, self.game.walls, False) if collides: if self.velX > 0: self.x = collides[0].rect.left - self.rect.width if self.velX < 0: self.x = collides[0].rect.right self.velX = 0 self.rect.x = self.x if axis == 'y': collides = pygame.sprite.spritecollide(self, self.game.walls, False) if collides: if self.velY > 0: self.y = collides[0].rect.top - self.rect.height if self.velY < 0: self.y = collides[0].rect.bottom self.velY = 0 self.rect.y = self.y def player_collision(self, axis): """ checks for player collision with the all wall sprites on the axis given and prevents player movement onto it """ # checks for player 1 collision to player 2 if self.playerNum == 1: if axis == 'x': if self.rect.colliderect(self.game.player2): if self.velX > 0: self.x = self.game.player2.rect.left - self.rect.width if 
self.velX < 0: self.x = self.game.player2.rect.right self.velX = 0 self.rect.x = self.x if axis == 'y': if self.rect.colliderect(self.game.player2): if self.velY > 0: self.y = self.game.player2.rect.top - self.rect.height if self.velY < 0: self.y = self.game.player2.rect.bottom self.velY = 0 self.rect.y = self.y # checks for player 2 collision to player 1 else: if axis == 'x': if self.rect.colliderect(self.game.player1): if self.velX > 0: self.x = self.game.player1.rect.left - self.rect.width if self.velX < 0: self.x = self.game.player1.rect.right self.velX = 0 self.rect.x = self.x if axis == 'y': if self.rect.colliderect(self.game.player1): if self.velY > 0: self.y = self.game.player1.rect.top - self.rect.height if self.velY < 0: self.y = self.game.player1.rect.bottom self.velY = 0 self.rect.y = self.y def food_collision(self): """ checks for player collision with all food sprites killing any sprites it comes collides with and adding 1 to the players score value """ collides = pygame.sprite.spritecollide(self, self.game.food, True) if collides: self.score += 1 def update(self): """ updates the players position """ self.get_keys() if self.joystick_enabled == True: self.get_joystick_axis() self.direction() self.x += self.velX * self.game.dt self.y += self.velY * self.game.dt self.rect.x = self.x self.wall_collision('x') self.player_collision('x') self.rect.y = self.y self.wall_collision('y') self.player_collision('y') self.food_collision() class Wall(pygame.sprite.Sprite): """ class to contain all the data for wall sprites """ def __init__(self, game, x, y): """ initalizes a wall sprite when an instance is create in the game parameter, at the x and y paramters """ self.groups = game.all_sprites, game.walls pygame.sprite.Sprite.__init__(self, self.groups) self.game = game self.image = game.wall_image self.rect = self.image.get_rect() self.x = x self.y = y self.rect.x = x * tileSize self.rect.y = y * tileSize class Floor(pygame.sprite.Sprite): """ class to contain all 
the data for floor sprites """ def __init__(self, game, x, y): """ initalizes a floor sprite when an instance is created in the game parameter, at the x and y paramters """ self.groups = game.all_sprites, game.floor pygame.sprite.Sprite.__init__(self, self.groups) self.game = game self.image = game.floor_image self.rect = self.image.get_rect() self.x = x self.y = y self.rect.x = x * tileSize self.rect.y = y * tileSize class Food(pygame.sprite.Sprite): """ class to contain all the data for food sprites """ def __init__(self, game, x, y): """ initalizes a food sprite when an instance is created in the game parameter, at the x and y paramters """ self.groups = game.all_sprites, game.food pygame.sprite.Sprite.__init__(self, self.groups) self.game = game # picks random image for the sprite self.image = pygame.image.load(os.path.join(food_folder, (random.choice(food_images)))).convert_alpha() self.rect = self.image.get_rect() self.x = x self.y = y self.rect.x = x * tileSize self.rect.y = y * tileSize # checks if the sprite is allowed to spawn in the x and y self.spawnable = False collided = pygame.sprite.spritecollide(self, self.game.floor, False) for sprite in collided: if self.x == sprite.x and self.y == sprite.y: self.spawnable = True if self.spawnable == False: self.kill()
40.174026
140
0.541799
1,854
15,467
4.434736
0.104639
0.042812
0.05838
0.052542
0.740452
0.719898
0.696911
0.691438
0.668086
0.636342
0
0.027068
0.36704
15,467
384
141
40.278646
0.812768
0.179026
0
0.599206
0
0
0.019392
0
0
0
0
0
0
1
0.043651
false
0
0.019841
0
0.079365
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
311469936b15c039449f088fcb08c289febfdf41
294
py
Python
app/schemas/usage_logs.py
wiki-yu/fastapi-algorithm-library
8f745e9fe4d1d063dc8505d4c7f467e95209a385
[ "MIT" ]
null
null
null
app/schemas/usage_logs.py
wiki-yu/fastapi-algorithm-library
8f745e9fe4d1d063dc8505d4c7f467e95209a385
[ "MIT" ]
null
null
null
app/schemas/usage_logs.py
wiki-yu/fastapi-algorithm-library
8f745e9fe4d1d063dc8505d4c7f467e95209a385
[ "MIT" ]
null
null
null
from typing import Optional, List from pydantic import BaseModel class UsageLog(BaseModel): api_key: str is_active: bool never_expire: bool expiration_date: str latest_query_date: Optional[str] total_queries: int class UsageLogs(BaseModel): logs: List[UsageLog]
18.375
36
0.738095
38
294
5.526316
0.684211
0
0
0
0
0
0
0
0
0
0
0
0.20068
294
15
37
19.6
0.893617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.181818
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
2
3117d8dd620c63b6e9601bc884ac493aa6485d06
1,100
py
Python
tweetf0rm/process/crawler_process.py
amaurywalbert/mytweetf0rm
2272b53214b3669eb104762f5b5b38ff4adda435
[ "MIT" ]
1
2015-02-16T11:08:35.000Z
2015-02-16T11:08:35.000Z
tweetf0rm/process/crawler_process.py
maruthiprithivi/tweetf0rm
f59e57495afda05032d41b161b5aed74f2bc4dfe
[ "MIT" ]
null
null
null
tweetf0rm/process/crawler_process.py
maruthiprithivi/tweetf0rm
f59e57495afda05032d41b161b5aed74f2bc4dfe
[ "MIT" ]
null
null
null
#!/usr/bin/python # -*- coding: utf-8 -*- # import logging logger = logging.getLogger(__name__) import multiprocessing as mp import tweetf0rm.handler from tweetf0rm.redis_helper import CrawlerQueue #MAX_QUEUE_SIZE = 32767 class CrawlerProcess(mp.Process): def __init__(self, node_id, crawler_id, redis_config, handlers): super(CrawlerProcess, self).__init__() self.node_id = node_id self.crawler_id = crawler_id self.redis_config = redis_config #self.queue = mp.Queue(maxsize=MAX_QUEUE_SIZE) self.crawler_queue = CrawlerQueue(node_id, crawler_id, redis_config=redis_config) self.crawler_queue.clear() #self.lock = mp.Lock() self.handlers = handlers logger.debug("number of handlers attached: %d"%(len(handlers))) def get_crawler_id(self): return self.crawler_id def enqueue(self, request): #self.queue.put(request, block=True) self.crawler_queue.put(request) return True def get_cmd(self): #return self.queue.get(block=True) return self.crawler_queue.get(block=True) def get_queue_size(self): self.crawler_queue.qsize() def run(self): pass
22.916667
83
0.75
158
1,100
4.962025
0.35443
0.098214
0.102041
0.035714
0.118622
0.066327
0
0
0
0
0
0.008403
0.134545
1,100
48
84
22.916667
0.815126
0.178182
0
0
0
0
0.03456
0
0
0
0
0
0
1
0.230769
false
0.038462
0.153846
0.076923
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
312a37dbf8253fa5df799a76f2660e8811afe2b8
1,823
py
Python
sympyosis/logger.py
ZechCodes/sympyosis
0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1
[ "MIT" ]
null
null
null
sympyosis/logger.py
ZechCodes/sympyosis
0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1
[ "MIT" ]
null
null
null
sympyosis/logger.py
ZechCodes/sympyosis
0c7315a08fc91d2d074b42f0aeb5d04c6f3f22d1
[ "MIT" ]
null
null
null
from enum import IntEnum from typing import Type, TypeVar import logging T = TypeVar("T") class LogLevel(IntEnum): DEBUG = logging.DEBUG INFO = logging.INFO WARNING = logging.WARNING ERROR = logging.ERROR CRITICAL = logging.CRITICAL @classmethod def get(cls: Type[T], name: str) -> T: return getattr(cls, name.upper()) class Logger: def __init__( self, name: str, level: LogLevel, *, parent: logging.Logger | None = None ): self._name = name self._level = level self._parent = parent if parent: self._logger = self._parent.getChild(self._name) else: self._logger = logging.getLogger(name) self.set_level(self._level) def log(self, message: str, level: LogLevel, *args, **kwargs): self._logger.log(level, message, *args, **kwargs) def debug(self, message: str, *args, **kwargs): self.log(message, LogLevel.DEBUG, *args, **kwargs) def info(self, message: str, *args, **kwargs): self.log(message, LogLevel.INFO, *args, **kwargs) def warning(self, message: str, *args, **kwargs): self.log(message, LogLevel.WARNING, *args, **kwargs) def error(self, message: str, *args, **kwargs): self.log(message, LogLevel.ERROR, *args, **kwargs) def critical(self, message: str, *args, **kwargs): self.log(message, LogLevel.CRITICAL, *args, **kwargs) def set_level(self, level: LogLevel): self._level = level self._logger.setLevel(level) def create_child_logger(self, name: str, level: LogLevel | None = None): return Logger(name, self._level, parent=level or self._logger) @staticmethod def initialize_loggers(level: LogLevel = LogLevel.ERROR): logging.basicConfig(level=level)
28.484375
81
0.633571
221
1,823
5.126697
0.217195
0.105914
0.074139
0.079435
0.245366
0.203001
0.203001
0.203001
0.203001
0
0
0
0.237521
1,823
63
82
28.936508
0.815108
0
0
0.044444
0
0
0.000549
0
0
0
0
0
0
1
0.244444
false
0
0.066667
0.044444
0.511111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
312efdddb68056b4177eee4701aa3c39ea0d5fe6
6,656
py
Python
tests/func/test_pipeline.py
kacmak7/dvc
7f92cc3be31f55a1d47c56fc5a39896dd5d1e313
[ "Apache-2.0" ]
null
null
null
tests/func/test_pipeline.py
kacmak7/dvc
7f92cc3be31f55a1d47c56fc5a39896dd5d1e313
[ "Apache-2.0" ]
null
null
null
tests/func/test_pipeline.py
kacmak7/dvc
7f92cc3be31f55a1d47c56fc5a39896dd5d1e313
[ "Apache-2.0" ]
null
null
null
import logging from dvc.main import main from tests.basic_env import TestDvc from tests.func.test_repro import TestRepro from tests.func.test_repro import TestReproChangedDeepData class TestPipelineShowSingle(TestDvc): def setUp(self): super().setUp() self.stage = "foo.dvc" ret = main(["add", self.FOO]) self.assertEqual(ret, 0) def test(self): ret = main(["pipeline", "show", self.stage]) self.assertEqual(ret, 0) def test_commands(self): ret = main(["pipeline", "show", self.stage, "--commands"]) self.assertEqual(ret, 0) def test_outs(self): ret = main(["pipeline", "show", self.stage, "--outs"]) self.assertEqual(ret, 0) def test_dot(self): ret = main(["pipeline", "show", "--dot", self.stage]) self.assertEqual(ret, 0) def test_tree(self): ret = main(["pipeline", "show", "--tree", self.stage]) self.assertEqual(ret, 0) def test_ascii_outs(self): ret = main(["pipeline", "show", "--ascii", self.stage, "--outs"]) self.assertEqual(ret, 0) def test_dot_commands(self): ret = main(["pipeline", "show", "--dot", self.stage, "--commands"]) self.assertEqual(ret, 0) def test_dot_outs(self): ret = main(["pipeline", "show", "--dot", self.stage, "--outs"]) self.assertEqual(ret, 0) def test_not_dvc_file(self): ret = main(["pipeline", "show", self.FOO]) self.assertNotEqual(ret, 0) def test_non_existing(self): ret = main(["pipeline", "show", "non-existing"]) self.assertNotEqual(ret, 0) def test_single_ascii(repo_dir, dvc_repo): dvc_repo.add(repo_dir.FOO) assert main(["pipeline", "show", "--ascii", "foo.dvc"]) == 0 def test_single_ascii_commands(repo_dir, dvc_repo): dvc_repo.add(repo_dir.FOO) assert main(["pipeline", "show", "--ascii", "foo.dvc", "--commands"]) == 0 class TestPipelineShow(TestRepro): def test(self): ret = main(["pipeline", "show", self.file1_stage]) self.assertEqual(ret, 0) def test_commands(self): ret = main(["pipeline", "show", self.file1_stage, "--commands"]) self.assertEqual(ret, 0) def test_ascii(self): ret = main(["pipeline", "show", "--ascii", self.file1_stage]) 
self.assertEqual(ret, 0) def test_dot(self): ret = main(["pipeline", "show", "--dot", self.file1_stage]) self.assertEqual(ret, 0) def test_ascii_commands(self): ret = main( ["pipeline", "show", "--ascii", self.file1_stage, "--commands"] ) self.assertEqual(ret, 0) def test_ascii_outs(self): ret = main(["pipeline", "show", "--ascii", self.file1_stage, "--outs"]) self.assertEqual(ret, 0) def test_dot_commands(self): ret = main( ["pipeline", "show", "--dot", self.file1_stage, "--commands"] ) self.assertEqual(ret, 0) def test_print_locked_stages(repo_dir, dvc_repo, caplog): dvc_repo.add("foo") dvc_repo.add("bar") dvc_repo.lock_stage("foo.dvc") caplog.clear() with caplog.at_level(logging.INFO, logger="dvc"): assert main(["pipeline", "show", "foo.dvc", "--locked"]) == 0 assert "foo.dvc" in caplog.text assert "bar.dvc" not in caplog.text def test_dot_outs(repo_dir, dvc_repo): dvc_repo.add(repo_dir.FOO) dvc_repo.run( outs=["file"], deps=[repo_dir.FOO, repo_dir.CODE], cmd="python {} {} {}".format(repo_dir.CODE, repo_dir.FOO, "file"), ) assert main(["pipeline", "show", "--dot", "file.dvc", "--outs"]) == 0 class TestPipelineShowOuts(TestRepro): def setUp(self): super().setUp() def test_outs(self): ret = main(["pipeline", "show", self.file1_stage, "--outs"]) self.assertEqual(ret, 0) class TestPipelineShowDeep(TestReproChangedDeepData): def test(self): ret = main(["pipeline", "show", self.file1_stage]) self.assertEqual(ret, 0) def test_commands(self): ret = main(["pipeline", "show", self.file1_stage, "--commands"]) self.assertEqual(ret, 0) def test_outs(self): ret = main(["pipeline", "show", self.file1_stage, "--outs"]) self.assertEqual(ret, 0) def test_ascii(self): ret = main(["pipeline", "show", "--ascii", self.file1_stage]) self.assertEqual(ret, 0) def test_dot(self): ret = main(["pipeline", "show", "--dot", self.file1_stage]) self.assertEqual(ret, 0) def test_ascii_commands(self): ret = main( ["pipeline", "show", "--ascii", self.file1_stage, "--commands"] ) 
self.assertEqual(ret, 0) def test_ascii_outs(self): ret = main(["pipeline", "show", "--ascii", self.file1_stage, "--outs"]) self.assertEqual(ret, 0) def test_dot_commands(self): ret = main( ["pipeline", "show", "--dot", self.file1_stage, "--commands"] ) self.assertEqual(ret, 0) def test_dot_outs(self): ret = main(["pipeline", "show", "--dot", self.file1_stage, "--outs"]) self.assertEqual(ret, 0) class TestPipelineListEmpty(TestDvc): def test(self): ret = main(["pipeline", "list"]) self.assertEqual(ret, 0) class TestPipelineListSingle(TestPipelineShowDeep): def test(self): ret = main(["pipeline", "list"]) self.assertEqual(ret, 0) class TestDvcRepoPipeline(TestDvc): def test_no_stages(self): pipelines = self.dvc.pipelines self.assertEqual(len(pipelines), 0) def one_pipeline(self): self.dvc.add("foo") self.dvc.run(deps=["foo"], outs=["bar"], cmd="") self.dvc.run(deps=["bar"], outs=["baz"], cmd="echo baz > baz") pipelines = self.dvc.pipelines self.assertEqual(len(pipelines), 1) self.assertEqual(pipelines[0].nodes, 3) self.assertEqual(pipelines[0].edges, 2) def two_pipelines(self): self.dvc.add("foo") self.dvc.run(deps=["foo"], outs=["bar"], cmd="") self.dvc.run(deps=["bar"], outs=["baz"], cmd="echo baz > baz") self.dvc.add("code.py") pipelines = self.dvc.pipelines self.assertEqual(len(pipelines), 2) self.assertEqual(pipelines[0].nodes, 3) self.assertEqual(pipelines[0].edges, 2) self.assertEqual(pipelines[1].nodes, 1) self.assertEqual(pipelines[1].edges, 0) def locked_stage(self): self.dvc.add("foo") self.dvc.lock_stage("foo.dvc") pipelines = self.dvc.pipelines self.assertEqual(len(pipelines), 0)
29.847534
79
0.594351
826
6,656
4.671913
0.100484
0.147707
0.128531
0.142783
0.757191
0.726095
0.684633
0.678414
0.632288
0.584607
0
0.013096
0.23137
6,656
222
80
29.981982
0.741204
0
0
0.621951
0
0
0.126352
0
0
0
0
0
0.280488
1
0.237805
false
0
0.030488
0
0.310976
0.006098
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
3132c79a2cad6d01993855975464a0c7d164ed0d
898
py
Python
src/apiron/service/discoverable.py
tushar-deepsource/apiron
6b542d498e1e2a76d5f8a2d086d237be43d09bc3
[ "MIT" ]
109
2018-10-01T19:38:36.000Z
2022-03-10T05:28:34.000Z
src/apiron/service/discoverable.py
tushar-deepsource/apiron
6b542d498e1e2a76d5f8a2d086d237be43d09bc3
[ "MIT" ]
39
2018-10-01T20:51:49.000Z
2022-03-07T15:38:32.000Z
src/apiron/service/discoverable.py
tushar-deepsource/apiron
6b542d498e1e2a76d5f8a2d086d237be43d09bc3
[ "MIT" ]
10
2018-10-02T06:54:40.000Z
2020-05-28T14:30:12.000Z
from typing import List, Type from apiron.service.base import ServiceBase class DiscoverableService(ServiceBase): """ A Service whose hosts are determined via a host resolver. A host resolver is any class with a :func:`resolve` method that takes a service name as its sole argument and returns a list of host names that correspond to that service. """ host_resolver_class: Type service_name: str @classmethod def get_hosts(cls) -> List[str]: return cls.host_resolver_class.resolve(cls.service_name) def __str__(self) -> str: return self.service_name def __repr__(self) -> str: klass = self.__class__ return "{klass}(service_name={service_name}, host_resolver={host_resolver})".format( klass=klass.__name__, service_name=klass.service_name, host_resolver=klass.host_resolver_class.__name__ )
30.965517
115
0.707127
120
898
4.983333
0.4
0.160535
0.085284
0.076923
0
0
0
0
0
0
0
0
0.213808
898
28
116
32.071429
0.847026
0.255011
0
0
0
0
0.104524
0.102964
0
0
0
0
0
1
0.2
false
0
0.133333
0.133333
0.733333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
314bff70a566f2c7c9e67a0b7e9a88c99668d8c8
857
py
Python
ois_api_client/v2_0/deserialization/deserialize_invoice_number_query.py
peterkulik/ois_api_client
51dabcc9f920f89982c4419bb058f5a88193cee0
[ "MIT" ]
7
2020-10-22T08:15:29.000Z
2022-01-27T07:59:39.000Z
ois_api_client/v2_0/deserialization/deserialize_invoice_number_query.py
peterkulik/ois_api_client
51dabcc9f920f89982c4419bb058f5a88193cee0
[ "MIT" ]
null
null
null
ois_api_client/v2_0/deserialization/deserialize_invoice_number_query.py
peterkulik/ois_api_client
51dabcc9f920f89982c4419bb058f5a88193cee0
[ "MIT" ]
null
null
null
from typing import Optional
import xml.etree.ElementTree as ET
from ...xml.XmlReader import XmlReader as XR
from ..namespaces import API
from ..namespaces import DATA
from ...deserialization.create_enum import create_enum
from ..dto.InvoiceNumberQuery import InvoiceNumberQuery
from ..dto.InvoiceDirection import InvoiceDirection


def deserialize_invoice_number_query(element: ET.Element) -> Optional[InvoiceNumberQuery]:
    """Build an InvoiceNumberQuery DTO from its XML element, or None if absent."""
    if element is None:
        return None

    text_of = XR.get_child_text
    direction = create_enum(InvoiceDirection, text_of(element, 'invoiceDirection', API))

    return InvoiceNumberQuery(
        invoice_number=text_of(element, 'invoiceNumber', API),
        invoice_direction=direction,
        batch_index=XR.get_child_int(element, 'batchIndex', API),
        supplier_tax_number=text_of(element, 'supplierTaxNumber', API),
    )
37.26087
109
0.771295
102
857
6.294118
0.421569
0.031153
0.062305
0.065421
0.116822
0.084112
0
0
0
0
0
0
0.144691
857
22
110
38.954545
0.875853
0
0
0
0
0
0.065344
0
0
0
0
0
0
1
0.055556
false
0
0.444444
0
0.611111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
315b8d418921022e63456c61eb0e983243286ff8
245
py
Python
head_first_v2/ch4/modules/setup.py
alex-d-bondarev/learn-python
b119cb1e09a57e93abc73383c014cc8ceba18acf
[ "MIT" ]
null
null
null
head_first_v2/ch4/modules/setup.py
alex-d-bondarev/learn-python
b119cb1e09a57e93abc73383c014cc8ceba18acf
[ "MIT" ]
null
null
null
head_first_v2/ch4/modules/setup.py
alex-d-bondarev/learn-python
b119cb1e09a57e93abc73383c014cc8ceba18acf
[ "MIT" ]
null
null
null
from setuptools import setup

# Package metadata kept in one mapping so it reads like a manifest.
_METADATA = dict(
    name='lsearch',
    version='1.0',
    description='The Head First Python Search Tools',
    author='HF Python 2e',
    author_email='hfpy2e@gmail.com',
    url='headfirstlabs.com',
    py_modules=['lsearch'],
)

setup(**_METADATA)
24.5
109
0.685714
32
245
5.1875
0.84375
0
0
0
0
0
0
0
0
0
0
0.019608
0.167347
245
9
110
27.222222
0.794118
0
0
0
0
0
0.391837
0
0
0
0
0
0
1
0
true
0
0.125
0
0.125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
315c82c7ef0cdf26d1ed8dc3787880aa67476f2f
1,329
py
Python
stability/stairs_contacts.py
haudren/stability-polygon
b5e42bbd6eda2426a2c0d70716fbf956ce63f290
[ "MIT" ]
null
null
null
stability/stairs_contacts.py
haudren/stability-polygon
b5e42bbd6eda2426a2c0d70716fbf956ce63f290
[ "MIT" ]
null
null
null
stability/stairs_contacts.py
haudren/stability-polygon
b5e42bbd6eda2426a2c0d70716fbf956ce63f290
[ "MIT" ]
null
null
null
import numpy as np

# Upward-facing ground normal shared by all four foot contacts.
_GROUND_NORMAL = [2.02215104e-04, -3.23903880e-05, 9.99999979e-01]

# (point, normal) pairs; each becomes a 3x1 column vector below.
_CONTACTS = [
    # rfoot/ground corners
    ([-0.4722227, -0.24517583, -0.6370031], _GROUND_NORMAL),
    ([-0.2549828, -0.24587737, -0.63704705], _GROUND_NORMAL),
    ([-0.25787751, -0.38255749, -0.63705089], _GROUND_NORMAL),
    ([-0.47206733, -0.38317576, -0.6370076], _GROUND_NORMAL),
    # lgripper/handrail, left face
    ([0.3651077, 0.33419711, 0.63609439],
     [-3.39491173e-05, 9.99999875e-01, 4.99472000e-04]),
    # lgripper/handrail, top face
    # (right and bottom faces were present in the original but disabled)
    ([0.38480749, 0.31420908, 0.61345819],
     [6.56636734e-01, 4.00439950e-04, -7.54206894e-01]),
]

# Column-vector form (3, 1), matching the original [[x, y, z]].T layout.
pos = [np.array([point]).T for point, _ in _CONTACTS]
normals = [np.array([normal]).T for _, normal in _CONTACTS]
23.315789
57
0.653123
231
1,329
3.757576
0.307359
0.020737
0.092166
0.156682
0.474654
0.474654
0.474654
0.354839
0.354839
0.288018
0
0.429057
0.114372
1,329
56
58
23.732143
0.308411
0.2769
0
0.551724
0
0
0
0
0
0
0
0
0
1
0
false
0
0.034483
0
0.034483
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
3169e4d8e7a2dd174944cc17f672b8a7919b6ebd
1,928
py
Python
targhe/models.py
luca772005/studio
8d19d28f13f400aa4dde84c36e44cf5891d18ddd
[ "MIT" ]
null
null
null
targhe/models.py
luca772005/studio
8d19d28f13f400aa4dde84c36e44cf5891d18ddd
[ "MIT" ]
null
null
null
targhe/models.py
luca772005/studio
8d19d28f13f400aa4dde84c36e44cf5891d18ddd
[ "MIT" ]
null
null
null
from django.db import models # Create your models here. class Tipo(models.Model): descrizione = models.CharField(blank=False, null=False, max_length=128) def __unicode__(self): return "{}".format(self.descrizione) class Meta: verbose_name_plural = 'Tipi' class Marca(models.Model): descrizione = models.CharField(blank=False, null=False, max_length=128) def __unicode__(self): return "{}".format(self.descrizione) class Meta: verbose_name_plural = 'Marche' class Modello(models.Model): descrizione = models.CharField(blank=False, null=False, max_length=128) marca = models.ForeignKey(Marca, null=False, blank=False) tipo = models.ForeignKey(Tipo, null=False, blank=False) def __unicode__(self): return "{}".format(self.descrizione) class Meta: verbose_name_plural = 'Modelli' class Alimentazione(models.Model): descrizione = models.CharField(blank=False, null=False, max_length=128) def __unicode__(self): return "{}".format(self.descrizione) class Meta: verbose_name_plural = 'Alimentazioni' class Mezzo(models.Model): telaio = models.CharField(blank=False, null=False, max_length=128) colore = models.CharField(blank=False, null=False, max_length=128) alimentazione = models.ForeignKey(Alimentazione, null=False, blank=False) modello = models.ForeignKey(Modello, null=False, blank=False) def __unicode__(self): return "{} {}".format(self.telaio, self.modello) class Meta: verbose_name_plural = 'Mezzi' class Targa(models.Model): numero = models.CharField(null=False, blank=False, max_length=16) dal = models.DateField() al = models.DateField() mezzo = models.ForeignKey(Mezzo, null=False, blank=False) def __unicode__(self): return "{}".format(self.numero) class Meta: verbose_name_plural = 'Targhe'
26.410959
77
0.688278
229
1,928
5.606987
0.20524
0.093458
0.076324
0.116822
0.623832
0.583333
0.583333
0.583333
0.583333
0.511682
0
0.012829
0.19139
1,928
72
78
26.777778
0.810776
0.012448
0
0.444444
0
0
0.029443
0
0
0
0
0
0
1
0.133333
false
0
0.022222
0.133333
0.866667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
316f355464f64ade9dfc879dc379fa3194ccd5a6
363
py
Python
objettoqt/mixins.py
brunonicko/objettoqt
1a91ef58d4540b7f377e405492d35ccd222d71d5
[ "MIT" ]
null
null
null
objettoqt/mixins.py
brunonicko/objettoqt
1a91ef58d4540b7f377e405492d35ccd222d71d5
[ "MIT" ]
null
null
null
objettoqt/mixins.py
brunonicko/objettoqt
1a91ef58d4540b7f377e405492d35ccd222d71d5
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Mix-in classes for `Qt` types.""" from ._mixins import ( OQAbstractItemModelMixin, OQAbstractItemViewMixin, OQObjectMixin, OQWidgetMixin, ) from ._views import OQListViewMixin __all__ = [ "OQObjectMixin", "OQWidgetMixin", "OQAbstractItemModelMixin", "OQAbstractItemViewMixin", "OQListViewMixin", ]
19.105263
36
0.683196
26
363
9.307692
0.730769
0.38843
0
0
0
0
0
0
0
0
0
0.003413
0.192837
363
18
37
20.166667
0.822526
0.146006
0
0
0
0
0.289474
0.154605
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
31789285a83d618d1865746d4dfadf7a34d38be3
403
py
Python
examples/circuitplayground_light_plotter.py
sommersoft/Adafruit_CircuitPython_CircuitPlayground
418ca982b34759b2804c2e816cdb505b2b818135
[ "MIT" ]
null
null
null
examples/circuitplayground_light_plotter.py
sommersoft/Adafruit_CircuitPython_CircuitPlayground
418ca982b34759b2804c2e816cdb505b2b818135
[ "MIT" ]
null
null
null
examples/circuitplayground_light_plotter.py
sommersoft/Adafruit_CircuitPython_CircuitPlayground
418ca982b34759b2804c2e816cdb505b2b818135
[ "MIT" ]
null
null
null
"""If you're using Mu, this example will plot the light levels from the light sensor (located next to the eye) on your Circuit Playground. Try shining a flashlight on your Circuit Playground, or covering the light sensor to see the plot increase and decrease.""" import time from adafruit_circuitplayground import cp while True: print("Light:", cp.light) print((cp.light,)) time.sleep(0.1)
36.636364
98
0.751861
65
403
4.646154
0.661538
0.07947
0.092715
0.152318
0
0
0
0
0
0
0
0.005988
0.171216
403
10
99
40.3
0.898204
0.635236
0
0
0
0
0.042553
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0.333333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
31b9a252afcfedd5b4624ba9c3b0dea6a7505e81
10,253
py
Python
model_zoo/official/cv/FCN8s/src/nets/FCN8s.py
LottieWang/mindspore
1331c7e432fb691d1cfa625ab7cc7451dcfc7ce0
[ "Apache-2.0" ]
null
null
null
model_zoo/official/cv/FCN8s/src/nets/FCN8s.py
LottieWang/mindspore
1331c7e432fb691d1cfa625ab7cc7451dcfc7ce0
[ "Apache-2.0" ]
null
null
null
model_zoo/official/cv/FCN8s/src/nets/FCN8s.py
LottieWang/mindspore
1331c7e432fb691d1cfa625ab7cc7451dcfc7ce0
[ "Apache-2.0" ]
null
null
null
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import mindspore.nn as nn
from mindspore.ops import operations as P


class FCN8s(nn.Cell):
    """FCN-8s semantic segmentation network (MindSpore).

    Encoder: five conv/pool stages (3->64->128->256->512->512 channels),
    followed by two wide 4096-channel conv stages.  Decoder: 1x1 score
    layers on the final features and on the pool4/pool3 outputs, fused via
    2x transposed-conv upsampling and a final 8x upsampling to n_class
    score maps.

    Args:
        n_class: number of output segmentation classes.
    """

    def __init__(self, n_class):
        super().__init__()
        self.n_class = n_class
        # Stage 1: 3 -> 64, two conv+BN+ReLU, then 2x2 max-pool.
        self.conv1 = nn.SequentialCell(
            nn.Conv2d(in_channels=3, out_channels=64, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(64),
            nn.ReLU()
        )
        self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2)
        # Stage 2: 64 -> 128.
        self.conv2 = nn.SequentialCell(
            nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(128),
            nn.ReLU(),
            nn.Conv2d(in_channels=128, out_channels=128, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(128),
            nn.ReLU()
        )
        self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2)
        # Stage 3: 128 -> 256, three conv blocks.
        self.conv3 = nn.SequentialCell(
            nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(256),
            nn.ReLU()
        )
        self.pool3 = nn.MaxPool2d(kernel_size=2, stride=2)
        # Stage 4: 256 -> 512, three conv blocks (output also scored for skip fusion).
        self.conv4 = nn.SequentialCell(
            nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(512),
            nn.ReLU()
        )
        self.pool4 = nn.MaxPool2d(kernel_size=2, stride=2)
        # Stage 5: 512 -> 512, three conv blocks.
        self.conv5 = nn.SequentialCell(
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, weight_init='xavier_uniform'),
            nn.BatchNorm2d(512),
            nn.ReLU()
        )
        self.pool5 = nn.MaxPool2d(kernel_size=2, stride=2)
        # Fully-convolutional "classifier" stages: 512 -> 4096 (7x7), 4096 -> 4096 (1x1).
        self.conv6 = nn.SequentialCell(
            nn.Conv2d(in_channels=512, out_channels=4096, kernel_size=7, weight_init='xavier_uniform'),
            nn.BatchNorm2d(4096),
            nn.ReLU(),
        )
        self.conv7 = nn.SequentialCell(
            nn.Conv2d(in_channels=4096, out_channels=4096, kernel_size=1, weight_init='xavier_uniform'),
            nn.BatchNorm2d(4096),
            nn.ReLU(),
        )
        # 1x1 scoring heads and learned upsampling layers for skip fusion.
        self.score_fr = nn.Conv2d(in_channels=4096, out_channels=self.n_class,
                                  kernel_size=1, weight_init='xavier_uniform')
        self.upscore2 = nn.Conv2dTranspose(in_channels=self.n_class, out_channels=self.n_class,
                                           kernel_size=4, stride=2, weight_init='xavier_uniform')
        self.score_pool4 = nn.Conv2d(in_channels=512, out_channels=self.n_class,
                                     kernel_size=1, weight_init='xavier_uniform')
        self.upscore_pool4 = nn.Conv2dTranspose(in_channels=self.n_class, out_channels=self.n_class,
                                                kernel_size=4, stride=2, weight_init='xavier_uniform')
        self.score_pool3 = nn.Conv2d(in_channels=256, out_channels=self.n_class,
                                     kernel_size=1, weight_init='xavier_uniform')
        # Final 8x upsampling back toward input resolution.
        self.upscore8 = nn.Conv2dTranspose(in_channels=self.n_class, out_channels=self.n_class,
                                           kernel_size=16, stride=8, weight_init='xavier_uniform')
        self.shape = P.Shape()
        self.cast = P.Cast()

    def set_model_parallel_shard_strategy(self, device_num):
        """Configure operator-level shard strategies for `device_num` devices.

        Shards every conv/BN/ReLU/pool primitive along the last (width)
        axis of the NCHW activation layout.

        NOTE(review): the last two lines reference self.add1 / self.add2,
        which are never defined in __init__ (construct() uses bare `+`
        instead) — calling this method as written would raise
        AttributeError.  Confirm whether add1/add2 (e.g. P.Add()) were
        meant to be created in __init__ and used in construct().
        """
        self.conv2d_strategy = ((1, 1, 1, device_num), (1, 1, 1, 1))
        self.bn_strategy = ((1, 1, 1, device_num), (1,), (1,), (1,), (1,))
        self.relu_strategy = ((1, 1, 1, device_num),)
        self.maxpool_strategy = ((1, 1, 1, device_num),)
        self.add_strategy = ((1, 1, 1, device_num), (1, 1, 1, device_num))
        # SequentialCell layout per stage: [conv, bn, relu, conv, bn, relu, ...].
        self.conv1.cell_list[0].conv2d.shard(self.conv2d_strategy)
        self.conv1.cell_list[1].bn_train.shard(self.bn_strategy)
        self.conv1.cell_list[2].relu.shard(self.relu_strategy)
        self.conv1.cell_list[3].conv2d.shard(self.conv2d_strategy)
        self.conv1.cell_list[4].bn_train.shard(self.bn_strategy)
        self.conv1.cell_list[5].relu.shard(self.relu_strategy)
        self.pool1.max_pool.shard(self.maxpool_strategy)
        self.conv2.cell_list[0].conv2d.shard(self.conv2d_strategy)
        self.conv2.cell_list[1].bn_train.shard(self.bn_strategy)
        self.conv2.cell_list[2].relu.shard(self.relu_strategy)
        self.conv2.cell_list[3].conv2d.shard(self.conv2d_strategy)
        self.conv2.cell_list[4].bn_train.shard(self.bn_strategy)
        self.conv2.cell_list[5].relu.shard(self.relu_strategy)
        self.pool2.max_pool.shard(self.maxpool_strategy)
        self.conv3.cell_list[0].conv2d.shard(self.conv2d_strategy)
        self.conv3.cell_list[1].bn_train.shard(self.bn_strategy)
        self.conv3.cell_list[2].relu.shard(self.relu_strategy)
        self.conv3.cell_list[3].conv2d.shard(self.conv2d_strategy)
        self.conv3.cell_list[4].bn_train.shard(self.bn_strategy)
        self.conv3.cell_list[5].relu.shard(self.relu_strategy)
        self.conv3.cell_list[6].conv2d.shard(self.conv2d_strategy)
        self.conv3.cell_list[7].bn_train.shard(self.bn_strategy)
        self.conv3.cell_list[8].relu.shard(self.relu_strategy)
        self.pool3.max_pool.shard(self.maxpool_strategy)
        self.conv4.cell_list[0].conv2d.shard(self.conv2d_strategy)
        self.conv4.cell_list[1].bn_train.shard(self.bn_strategy)
        self.conv4.cell_list[2].relu.shard(self.relu_strategy)
        self.conv4.cell_list[3].conv2d.shard(self.conv2d_strategy)
        self.conv4.cell_list[4].bn_train.shard(self.bn_strategy)
        self.conv4.cell_list[5].relu.shard(self.relu_strategy)
        self.conv4.cell_list[6].conv2d.shard(self.conv2d_strategy)
        self.conv4.cell_list[7].bn_train.shard(self.bn_strategy)
        self.conv4.cell_list[8].relu.shard(self.relu_strategy)
        self.pool4.max_pool.shard(self.maxpool_strategy)
        self.conv5.cell_list[0].conv2d.shard(self.conv2d_strategy)
        self.conv5.cell_list[1].bn_train.shard(self.bn_strategy)
        self.conv5.cell_list[2].relu.shard(self.relu_strategy)
        self.conv5.cell_list[3].conv2d.shard(self.conv2d_strategy)
        self.conv5.cell_list[4].bn_train.shard(self.bn_strategy)
        self.conv5.cell_list[5].relu.shard(self.relu_strategy)
        self.conv5.cell_list[6].conv2d.shard(self.conv2d_strategy)
        self.conv5.cell_list[7].bn_train.shard(self.bn_strategy)
        self.conv5.cell_list[8].relu.shard(self.relu_strategy)
        # Inline tuple here is numerically identical to self.maxpool_strategy.
        self.pool5.max_pool.shard(((1, 1, 1, device_num),))
        self.conv6.cell_list[0].conv2d.shard(self.conv2d_strategy)
        self.conv6.cell_list[1].bn_train.shard(self.bn_strategy)
        self.conv6.cell_list[2].relu.shard(self.relu_strategy)
        self.conv7.cell_list[0].conv2d.shard(self.conv2d_strategy)
        self.conv7.cell_list[1].bn_train.shard(self.bn_strategy)
        self.conv7.cell_list[2].relu.shard(self.relu_strategy)
        self.score_fr.conv2d.shard(self.conv2d_strategy)
        self.upscore2.conv2d_transpose.shard(self.conv2d_strategy)
        self.score_pool4.conv2d.shard(self.conv2d_strategy)
        self.upscore_pool4.conv2d_transpose.shard(self.conv2d_strategy)
        self.score_pool3.conv2d.shard(self.conv2d_strategy)
        self.upscore8.conv2d_transpose.shard(self.conv2d_strategy)
        # NOTE(review): add1/add2 are not defined anywhere in this class.
        self.add1.shard(self.add_strategy)
        self.add2.shard(self.add_strategy)

    def construct(self, x):
        """Forward pass: encode, score, and fuse pool3/pool4 skips, then 8x upsample."""
        x1 = self.conv1(x)
        p1 = self.pool1(x1)
        x2 = self.conv2(p1)
        p2 = self.pool2(x2)
        x3 = self.conv3(p2)
        p3 = self.pool3(x3)
        x4 = self.conv4(p3)
        p4 = self.pool4(x4)
        x5 = self.conv5(p4)
        p5 = self.pool5(x5)
        x6 = self.conv6(p5)
        x7 = self.conv7(x6)
        sf = self.score_fr(x7)
        u2 = self.upscore2(sf)
        s4 = self.score_pool4(p4)
        f4 = s4 + u2          # fuse pool4 skip with 2x-upsampled scores
        u4 = self.upscore_pool4(f4)
        s3 = self.score_pool3(p3)
        f3 = s3 + u4          # fuse pool3 skip
        out = self.upscore8(f3)
        return out
48.592417
103
0.609968
1,349
10,253
4.4255
0.122313
0.114573
0.066332
0.080905
0.7933
0.776549
0.738693
0.664824
0.621441
0.40737
0
0.062832
0.265776
10,253
210
104
48.82381
0.730207
0.062226
0
0.297297
0
0
0.03131
0
0
0
0
0
0
1
0.016216
false
0
0.010811
0
0.037838
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
31d1dcdc84557e4ef3daa2e742b3df53f7c45b0e
1,569
py
Python
fdrtd_server/exceptions.py
UNakade/server
f659524242d01fe67f9801ab41fabf46640ad590
[ "MIT" ]
null
null
null
fdrtd_server/exceptions.py
UNakade/server
f659524242d01fe67f9801ab41fabf46640ad590
[ "MIT" ]
null
null
null
fdrtd_server/exceptions.py
UNakade/server
f659524242d01fe67f9801ab41fabf46640ad590
[ "MIT" ]
null
null
null
import logging as _logging


def handle_exception(e):
    """Log an exception and map it to a ``(message, statuscode)`` pair.

    Known :class:`ApiError` instances keep their message and status code;
    anything else becomes ``(None, 500)``.
    """
    known = isinstance(e, ApiError)
    _logging.exception(e.message if known else repr(e))
    if known:
        return e.message, e.statuscode
    return None, 500


class ApiError(Exception):
    """Base class for errors that carry an HTTP status code and message."""

    def __init__(self, statuscode, message):
        self.statuscode = statuscode
        self.message = message

    def __str__(self):
        return self.message


class InternalServerError(ApiError):
    """HTTP 500: unexpected server-side failure."""

    def __init__(self, message):
        super().__init__(500, f'internal server error: {message}')


class NotAvailable(ApiError):
    """HTTP 501: functionality not implemented or not available."""

    def __init__(self, missing):
        super().__init__(501, f'not implemented / not available: {missing}')


class MissingParameter(ApiError):
    """HTTP 400: a required parameter was not supplied."""

    def __init__(self, missing):
        super().__init__(400, f'missing parameter: {missing}')


class InvalidParameter(ApiError):
    """HTTP 400: a parameter was supplied with an invalid value."""

    def __init__(self, parameter, invalid):
        super().__init__(400, f'invalid parameter: {parameter} = {invalid}')


class InvalidIdentifier(ApiError):
    """HTTP 404: an identifier did not match any known resource."""

    def __init__(self, identifier, invalid):
        super().__init__(404, f'invalid identifier: {identifier} = {invalid}')


class MicroserviceNotFound(ApiError):
    """HTTP 404: the requested microservice is not available."""

    def __init__(self, missing):
        super().__init__(404, f'microservice not available: {missing}')


class FunctionNotFound(ApiError):
    """HTTP 404: the requested function is not available."""

    def __init__(self, missing):
        super().__init__(404, f'function not available: {missing}')


class FunctionNotPublic(ApiError):
    """HTTP 403: the requested function exists but is not public."""

    def __init__(self, missing):
        super().__init__(403, f'function not public: {missing}')
22.414286
78
0.684512
170
1,569
5.870588
0.276471
0.063126
0.099198
0.152305
0.183367
0.183367
0.183367
0.078156
0.078156
0
0
0.021378
0.195029
1,569
69
79
22.73913
0.768804
0
0
0.135135
0
0
0.183556
0
0
0
0
0
0
1
0.297297
false
0
0.027027
0.027027
0.648649
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
31e18d81d721e6fc0a0c74da919f35393478b123
324
py
Python
test/programytest/storage/entities/test_nodes.py
cdoebler1/AIML2
ee692ec5ea3794cd1bc4cc8ec2a6b5e5c20a0d6a
[ "MIT" ]
345
2016-11-23T22:37:04.000Z
2022-03-30T20:44:44.000Z
test/programytest/storage/entities/test_nodes.py
MikeyBeez/program-y
00d7a0c7d50062f18f0ab6f4a041068e119ef7f0
[ "MIT" ]
275
2016-12-07T10:30:28.000Z
2022-02-08T21:28:33.000Z
test/programytest/storage/entities/test_nodes.py
VProgramMist/modified-program-y
f32efcafafd773683b3fe30054d5485fe9002b7d
[ "MIT" ]
159
2016-11-28T18:59:30.000Z
2022-03-20T18:02:44.000Z
import unittest
import unittest.mock

from programy.storage.entities.nodes import NodesStore


class NodesStoreTest(unittest.TestCase):
    """Tests for the NodesStore storage entity base class."""

    def test_load(self):
        # NodesStore.load() is an abstract-style hook: calling it on the
        # base class directly must raise NotImplementedError.
        store = NodesStore()
        with self.assertRaises(NotImplementedError):
            collector = unittest.mock.Mock()
            store.load(collector)
21.6
54
0.703704
33
324
6.878788
0.636364
0.123348
0
0
0
0
0
0
0
0
0
0
0.216049
324
14
55
23.142857
0.893701
0
0
0
0
0
0
0
0
0
0
0
0.111111
1
0.111111
false
0
0.333333
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
9ed0bf65b8f404e11c189c592c88427ef28a69fc
685
py
Python
lh_lib/sensors/esp32/touch.py
lh70/s-connect-python
5a4ca17690ec700b36faf69ea744c514f532cc48
[ "Apache-2.0" ]
null
null
null
lh_lib/sensors/esp32/touch.py
lh70/s-connect-python
5a4ca17690ec700b36faf69ea744c514f532cc48
[ "Apache-2.0" ]
null
null
null
lh_lib/sensors/esp32/touch.py
lh70/s-connect-python
5a4ca17690ec700b36faf69ea744c514f532cc48
[ "Apache-2.0" ]
null
null
null
from machine import Pin

from lh_lib.sensors.sensor import AbstractSensor


class Touch(AbstractSensor):
    """A touch sensor with integrated logic exposed through one digital pin.

    The single output pin digitally represents the touched state.

    pin:integer can be one of all available GPIO pins: 0-19, 21-23, 25-27, 32-39
    it is NOT recommended to pick one of the following pins:
    (1, 3) -> serial, (6, 7, 8, 11, 16, 17) -> embedded flash
    """

    def __init__(self, pin=35):
        super().__init__()
        # Read-only digital input; the sensor hardware drives the level.
        self.pin = Pin(pin, Pin.IN)

    def update(self):
        """Set self.value to 0 for LOW and 1 for HIGH."""
        self.value = self.pin.value()
28.541667
130
0.636496
104
685
4.105769
0.711538
0.04918
0.051522
0
0
0
0
0
0
0
0
0.059289
0.261314
685
23
131
29.782609
0.784585
0.49635
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
9ed4b01964cfce5140c8270d443eb2c516032d63
2,830
py
Python
SAMAE/data/__init__.py
Lisa-pa/SAMAE
8d52fd6f8c2634c82f2071233e9796ea322f6360
[ "MIT" ]
null
null
null
SAMAE/data/__init__.py
Lisa-pa/SAMAE
8d52fd6f8c2634c82f2071233e9796ea322f6360
[ "MIT" ]
4
2021-03-20T09:31:02.000Z
2022-03-12T00:51:19.000Z
SAMAE/data/__init__.py
Lisa-pa/AponeurosesDetection
8d52fd6f8c2634c82f2071233e9796ea322f6360
[ "MIT" ]
null
null
null
"""Standard test images. """ import os from skimage.io import imread data_dir = os.path.abspath(os.path.dirname(__file__)) __all__ = ['data_dir', 'circle', 'skmuscimg'] def _load(f, as_gray=False): """Load an image file located in the data directory. Parameters ---------- f : string File name. as_gray : bool, optional Whether to convert the image to grayscale. Returns ------- img : ndarray Image loaded from ``data_dir``. """ # importing io is quite slow since it scans all the backends # we lazy import it here return imread(f, as_gray=as_gray) def circle(): """Synthetic image of a circle Returns ------- circle : (xdim, ydim) bool ndarray Circle image. """ return _load(os.path.join(data_dir, "circle.bmp")) def skmuscimg(): """Cropped US image of a musculoskeletal muscle """ return _load(os.path.join(data_dir, "skmuscle.jpg")) def panoimg(): """Panoramic US image of a musculoskeletal muscle """ return _load(os.path.join(data_dir, "panoramic_echo.jpg")) def simpleimg(): """Simple US image of a musculoskeletal muscle """ return _load(os.path.join(data_dir, "simple_echo.jpg")) def downloadFromDropbox(tok, path2file): """Download an image from a Dropbox account. Args: tok (string): access token that connects to the wanted app in Dropbox account path2file (string): Path of the file to download, in the app corresponding to the above token. Output: image (numpy.ndarray): 3-channel color image, with coefficients' type == uint8 Example: 1) Register a new app in the App Console of your Dropbox account. Set up parameters as you want. 2) In Dropbox>Applications>MyApp, import your data. 3) In the settings page of MyApp, generate a token and copy it. It should look like a random string of letters and figures, as below. (!!!This access token can be used to access your account via the API. Don’t share your access token with anyone!!!) 
> token = 'Q8yhHQ4wquAAAAAAAAABRPb9LYdKAr2WGcmhhJ8egiX4_Qak6YZwBw4GUpX9DVeb' //token not available anymore > path = '/cropped_20181002_153426_image.jpg' > dt = downloadFromDropbox(token, path); """ import dropbox import numpy as np import cv2 dbx = dropbox.Dropbox(tok) try: metadata, file = dbx.files_download(path2file) except dropbox.exceptions.HttpError as err: print('*** HTTP error', err) return None data = np.frombuffer(file.content, np.uint8) image = cv2.imdecode(data, 1) return image
28.877551
114
0.621908
356
2,830
4.856742
0.441011
0.02834
0.018508
0.037016
0.116252
0.116252
0.116252
0.100636
0.100636
0.100636
0
0.017267
0.283746
2,830
98
115
28.877551
0.835718
0.591166
0
0
0
0
0.094748
0
0
0
0
0
0
1
0.222222
false
0
0.185185
0
0.666667
0.037037
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
732764ebd0702a98bf1fa40e238672b9d8162849
397
py
Python
tests/test_modules/test_ADPandABlocks/test_adpandablocks_blocks.py
aaron-parsons/pymalcolm
4e7ebd6b09382ab7e013278a81097d17873fa5c4
[ "Apache-2.0" ]
null
null
null
tests/test_modules/test_ADPandABlocks/test_adpandablocks_blocks.py
aaron-parsons/pymalcolm
4e7ebd6b09382ab7e013278a81097d17873fa5c4
[ "Apache-2.0" ]
null
null
null
tests/test_modules/test_ADPandABlocks/test_adpandablocks_blocks.py
aaron-parsons/pymalcolm
4e7ebd6b09382ab7e013278a81097d17873fa5c4
[ "Apache-2.0" ]
null
null
null
from mock import Mock

from malcolm.testutil import ChildTestCase
from malcolm.modules.ADPandABlocks.blocks import pandablocks_runnable_block


class TestADPandABlocksBlocks(ChildTestCase):
    """Smoke tests for block definitions in the ADPandABlocks module."""

    def test_pandablocks_runnable_block(self):
        # Instantiating the block with placeholder prefixes is enough to
        # catch definition-time errors in the block's YAML/parts.
        self.create_child_block(
            pandablocks_runnable_block, Mock(),
            mri_prefix="mri_prefix", pv_prefix="pv_prefix", config_dir="/tmp")
33.083333
78
0.7733
46
397
6.369565
0.543478
0.194539
0.245734
0
0
0
0
0
0
0
0
0
0.153652
397
11
79
36.090909
0.872024
0
0
0
0
0
0.057935
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.625
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
732b698a8ba47881b21329d7619d19e7979a1509
1,433
py
Python
subscriptions/models.py
emil-magnusson/py-on-api
50967ea9d6a189c2c1cb75bd3e2b8ab817077634
[ "MIT" ]
null
null
null
subscriptions/models.py
emil-magnusson/py-on-api
50967ea9d6a189c2c1cb75bd3e2b8ab817077634
[ "MIT" ]
4
2021-03-30T14:10:30.000Z
2021-09-22T19:29:56.000Z
subscriptions/models.py
emil-magnusson/py-on-api
50967ea9d6a189c2c1cb75bd3e2b8ab817077634
[ "MIT" ]
null
null
null
# subscriptions/models.py
import uuid

from django.db import models

from accesses.models import Accesses, Services


def _default_sp_reference():
    """Return a fresh 6-character uppercase hex reference.

    Must be passed to the field as a *callable*: calling uuid.uuid4()
    inline in the field declaration is evaluated only once at import
    time, so every new row would share the same "random" default.
    """
    return uuid.uuid4().hex[:6].upper()


class OperationalState(models.Model):
    # The state string itself is the primary key (e.g. lookup table rows).
    operationalState = models.CharField(primary_key=True, max_length=50)

    def __str__(self):
        return self.operationalState


class Subscriptions(models.Model):
    """A service subscription attached to an access."""

    subscriptionId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    accessId = models.ForeignKey(Accesses, related_name='subscriptions', on_delete=models.PROTECT)
    service = models.ForeignKey(Services, on_delete=models.PROTECT)
    operationalState = models.ForeignKey(OperationalState, on_delete=models.PROTECT)
    # BUG FIX: was default=uuid.uuid4().hex[:6].upper(), which Django
    # evaluated once at class definition — every row got the same default.
    spReference = models.CharField(max_length=50, default=_default_sp_reference)
    spSubscriptionId = models.UUIDField(default=uuid.uuid4, editable=False)
    #option82 = models.OneToOneField(Option82, on_delete=models.PROTECT)  ##dhcpIdentifier
    note = models.CharField(max_length=350, null=True, blank=True)  ##characteristics

    def __str__(self):
        return '{} - {}'.format(self.service, self.subscriptionId)


class Equipment(models.Model):
    """A hardware device (by vendor id / MAC) tied to a subscription."""

    subscriptionId = models.ForeignKey(Subscriptions, related_name='equipment', on_delete=models.PROTECT)
    vendorId = models.CharField(max_length=250)
    macAddress = models.CharField(max_length=250)

    def __str__(self):
        return '{} - {}'.format(self.vendorId, self.macAddress)
37.710526
105
0.752966
162
1,433
6.5
0.345679
0.071225
0.066477
0.099715
0.155745
0.049383
0
0
0
0
0
0.016908
0.133287
1,433
37
106
38.72973
0.830918
0.083043
0
0.130435
0
0
0.027565
0
0
0
0
0
0
1
0.130435
false
0
0.130435
0.130435
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
733cfbac832497fb734f7d0cde01792ad3325cd5
4,038
py
Python
app/core/models.py
echosisdev/openmrs-disa-sync
077331c5b96394f15cc90aba7ce4018e10d5608d
[ "MIT" ]
null
null
null
app/core/models.py
echosisdev/openmrs-disa-sync
077331c5b96394f15cc90aba7ce4018e10d5608d
[ "MIT" ]
null
null
null
app/core/models.py
echosisdev/openmrs-disa-sync
077331c5b96394f15cc90aba7ce4018e10d5608d
[ "MIT" ]
null
null
null
from django.db import models
from django.db.models.signals import pre_save, post_save

from core.utils.constants import Constants
from core.utils.data_convertion import DataConversion


class ExcelFile(models.Model):
    """An uploaded Excel spreadsheet, flagged once it has been processed."""

    file_name = models.FileField(upload_to='uploads')
    date_created = models.DateTimeField(auto_now_add=True)
    activated = models.BooleanField(default=False)

    def __str__(self):
        return f'File Id{self.id} File name {self.file_name}'


class CsvFile(models.Model):
    """An uploaded CSV file, flagged once it has been processed."""

    file_name = models.FileField(upload_to='uploads')
    date_uploaded = models.DateTimeField(auto_now_add=True)
    activated = models.BooleanField(default=False)

    def __str__(self):
        return f'File Id{self.id} File name {self.file_name}'


class ViralLoad(models.Model):
    """One viral-load laboratory result row imported from an uploaded file."""

    laboratory_id = models.CharField(max_length=100, null=True, blank=True)
    sector = models.CharField(max_length=30, blank=True, null=True)
    number_orig_lab = models.CharField(max_length=100, blank=True, null=True)
    province = models.CharField(max_length=100, blank=True, null=True)
    district = models.CharField(max_length=100, blank=True, null=True)
    health_facility = models.CharField(max_length=100, blank=True, null=True)
    patient_name = models.CharField(max_length=100, blank=True, null=True)
    gender = models.CharField(max_length=100, blank=True, null=True)
    reference = models.CharField(max_length=100, blank=True, null=True)
    capture_date = models.DateField(null=True, blank=True)
    access_date = models.DateField(null=True, blank=True)
    nid = models.CharField(max_length=100, blank=True, null=True)
    viral_load = models.CharField(max_length=100, null=True, blank=True)
    viral_load_qualitative = models.CharField(
        max_length=100, blank=True, null=True)
    synced = models.BooleanField(default=False)
    formatted_nid = models.CharField(max_length=100, blank=True, null=True)

    class Meta:
        verbose_name = 'Viral Load'
        verbose_name_plural = 'Viral Loads'

    def __str__(self):
        # BUG FIX: patient_name is nullable; returning None from __str__
        # raises TypeError, so coerce explicitly.
        return str(self.patient_name)


class Patient(models.Model):
    """A patient known to OpenMRS, identified by its UUID."""

    patient_uuid = models.CharField(max_length=500)
    #person_id = models.IntegerField()
    nid = models.CharField(max_length=100, blank=True, null=True)
    patient_name = models.CharField(max_length=100, blank=True, null=True)

    def __str__(self):
        # Coerce: patient_name is nullable and __str__ must return str.
        return str(self.patient_name)


class Encounter(models.Model):
    """An OpenMRS encounter to be synced for a patient."""

    encounterDatetime = models.DateTimeField(auto_now_add=True)
    patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
    # NOTE(review): these defaults call Constants() once at import time, so
    # the UUIDs are frozen per process — presumably intended; confirm.
    encounterType_uuid = models.CharField(
        max_length=255, default=Constants().get_uuids().get('encounter_type'))
    location_uuid = models.CharField(
        max_length=255, default=Constants().get_uuids().get('hpt'))
    form_uuid = models.CharField(
        max_length=255, default=Constants().get_uuids().get('form'))
    synced = models.BooleanField(default=False)

    def __str__(self):
        # BUG FIX: was `self.patient.name` — Patient has no `name` field
        # (AttributeError at runtime); delegate to Patient.__str__ instead.
        return str(self.patient)


class Observation(models.Model):
    """A single observation value attached to an encounter."""

    patient = models.ForeignKey(
        Patient, on_delete=models.CASCADE)
    obsDateTime = models.DateTimeField(auto_now_add=True)
    concept = models.CharField(max_length=255)
    value_numeric = models.PositiveIntegerField(null=True, blank=True)
    value_coded = models.PositiveIntegerField(null=True, blank=True)
    value_datetime = models.DateTimeField(null=True, blank=True)
    encounter = models.ForeignKey(Encounter, on_delete=models.CASCADE)
    location = models.CharField(
        max_length=255, default=Constants().get_uuids().get('hpt'))
    value = models.CharField(max_length=255)
    voided = models.BooleanField(default=False)
    synced = models.BooleanField(default=False)

    def __str__(self):
        # BUG FIX: __str__ must return a str; `self.id` is an int.
        return str(self.id)


# def insert_formatted_nid(sender, instance, created, *args, **kwargs):
#     if created:
#         instance.formatted_nid = DataConversion.format_nid(instance.nid)
#         print(instance.formatted_nid)


# post_save.connect(insert_formatted_nid, sender=ViralLoad)
38.826923
78
0.733779
522
4,038
5.47318
0.214559
0.115506
0.138607
0.184809
0.662583
0.621281
0.59818
0.539377
0.472874
0.349667
0
0.019034
0.154284
4,038
103
79
39.203884
0.81757
0.071322
0
0.315068
0
0
0.038729
0
0
0
0
0
0
1
0.082192
false
0
0.054795
0.082192
0.890411
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
734ac6f57c878180c7a2282d8eb947a2ce6b549f
2,588
py
Python
piptools/repositories/base.py
LaudateCorpus1/pip-tools
53c927262d816c336664afee9b03996bfb8f9c44
[ "BSD-3-Clause" ]
2
2021-12-02T11:41:02.000Z
2021-12-27T12:01:53.000Z
piptools/repositories/base.py
LaudateCorpus1/pip-tools
53c927262d816c336664afee9b03996bfb8f9c44
[ "BSD-3-Clause" ]
3
2020-11-20T18:42:20.000Z
2021-06-20T09:38:27.000Z
piptools/repositories/base.py
LaudateCorpus1/pip-tools
53c927262d816c336664afee9b03996bfb8f9c44
[ "BSD-3-Clause" ]
2
2021-07-13T08:53:43.000Z
2022-02-02T14:10:58.000Z
import optparse
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
from typing import Iterator, Optional, Set

from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.index import PyPI
from pip._internal.network.session import PipSession
from pip._internal.req import InstallRequirement


class BaseRepository(metaclass=ABCMeta):
    """Abstract contract shared by every concrete requirement repository."""

    DEFAULT_INDEX_URL = PyPI.simple_url

    def clear_caches(self) -> None:
        """Drop any cached state held by the implementation (no-op by default)."""

    @abstractmethod
    def find_best_match(
        self, ireq: InstallRequirement, prereleases: Optional[bool]
    ) -> InstallRequirement:
        """
        Resolve *ireq* against the external repository and return the best
        candidate as a pinned InstallRequirement.
        """

    @abstractmethod
    def get_dependencies(self, ireq: InstallRequirement) -> Set[InstallRequirement]:
        """
        Return the secondary dependencies of *ireq*, which may be a pinned,
        URL, or editable requirement. The returned InstallRequirements are
        not necessarily pinned themselves.
        """

    @abstractmethod
    def get_hashes(self, ireq: InstallRequirement) -> Set[str]:
        """
        Return the hashes of all files belonging to the pinned requirement
        *ireq*. Passing an editable or unpinned requirement here is a
        caller error.
        """

    @abstractmethod
    @contextmanager
    def allow_all_wheels(self) -> Iterator[None]:
        """
        Context manager that monkey-patches pip.Wheel so wheels built for
        any platform or Python version are accepted.
        """

    @abstractmethod
    def copy_ireq_dependencies(
        self, source: InstallRequirement, dest: InstallRequirement
    ) -> None:
        """
        Record that *dest* is a copy of *source* and therefore shares its
        dependencies. Without this, preparing an ireq to assign its name
        would lose track of those dependencies when the ireq is combined
        with others.
        """

    @property
    @abstractmethod
    def options(self) -> optparse.Values:
        """Returns parsed pip options"""

    @property
    @abstractmethod
    def session(self) -> PipSession:
        """Returns a session to make requests"""

    @property
    @abstractmethod
    def finder(self) -> PackageFinder:
        """Returns a package finder to interact with simple repository API (PEP 503)"""
34.506667
88
0.693199
294
2,588
6.044218
0.438776
0.066967
0.033765
0.032639
0.03489
0
0
0
0
0
0
0.001521
0.238022
2,588
74
89
34.972973
0.899594
0.409196
0
0.314286
0
0
0
0
0
0
0
0
0
1
0.257143
false
0
0.228571
0
0.542857
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
734d1d2e2ae1a6d7737d630a4bc5c6e70adf63d2
28,079
py
Python
billingbudgets/google/cloud/billing_budgets_v1beta1/proto/budget_model_pb2.py
hugovk/google-cloud-python
b387134827dbc3be0e1b431201e0875798002fda
[ "Apache-2.0" ]
1
2019-12-09T11:40:28.000Z
2019-12-09T11:40:28.000Z
billingbudgets/google/cloud/billing_budgets_v1beta1/proto/budget_model_pb2.py
hugovk/google-cloud-python
b387134827dbc3be0e1b431201e0875798002fda
[ "Apache-2.0" ]
1
2019-03-29T22:03:48.000Z
2019-04-02T22:24:45.000Z
billingbudgets/google/cloud/billing_budgets_v1beta1/proto/budget_model_pb2.py
hugovk/google-cloud-python
b387134827dbc3be0e1b431201e0875798002fda
[ "Apache-2.0" ]
1
2019-03-29T18:26:16.000Z
2019-03-29T18:26:16.000Z
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/billing/budgets_v1beta1/proto/budget_model.proto import sys _b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.type import money_pb2 as google_dot_type_dot_money__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/billing/budgets_v1beta1/proto/budget_model.proto", package="google.cloud.billing.budgets.v1beta1", syntax="proto3", serialized_options=_b( "\n(com.google.cloud.billing.budgets.v1beta1P\001ZKgoogle.golang.org/genproto/googleapis/cloud/billing/budgets/v1beta1;budgets" ), serialized_pb=_b( '\n=google/cloud/billing/budgets_v1beta1/proto/budget_model.proto\x12$google.cloud.billing.budgets.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x17google/type/money.proto"\xde\x03\n\x06\x42udget\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12H\n\rbudget_filter\x18\x03 \x01(\x0b\x32,.google.cloud.billing.budgets.v1beta1.FilterB\x03\xe0\x41\x01\x12G\n\x06\x61mount\x18\x04 \x01(\x0b\x32\x32.google.cloud.billing.budgets.v1beta1.BudgetAmountB\x03\xe0\x41\x02\x12Q\n\x0fthreshold_rules\x18\x05 \x03(\x0b\x32\x33.google.cloud.billing.budgets.v1beta1.ThresholdRuleB\x03\xe0\x41\x02\x12S\n\x10\x61ll_updates_rule\x18\x06 \x01(\x0b\x32\x34.google.cloud.billing.budgets.v1beta1.AllUpdatesRuleB\x03\xe0\x41\x01\x12\x11\n\x04\x65tag\x18\x07 
\x01(\tB\x03\xe0\x41\x01:]\xea\x41Z\n$billingbudgets.googleapis.com/Budget\x12\x32\x62illingAccounts/{billing_account}/budgets/{budget}"\xa5\x01\n\x0c\x42udgetAmount\x12.\n\x10specified_amount\x18\x01 \x01(\x0b\x32\x12.google.type.MoneyH\x00\x12T\n\x12last_period_amount\x18\x02 \x01(\x0b\x32\x36.google.cloud.billing.budgets.v1beta1.LastPeriodAmountH\x00\x42\x0f\n\rbudget_amount"\x12\n\x10LastPeriodAmount"\xcd\x01\n\rThresholdRule\x12\x1e\n\x11threshold_percent\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12S\n\x0bspend_basis\x18\x02 \x01(\x0e\x32\x39.google.cloud.billing.budgets.v1beta1.ThresholdRule.BasisB\x03\xe0\x41\x01"G\n\x05\x42\x61sis\x12\x15\n\x11\x42\x41SIS_UNSPECIFIED\x10\x00\x12\x11\n\rCURRENT_SPEND\x10\x01\x12\x14\n\x10\x46ORECASTED_SPEND\x10\x02"H\n\x0e\x41llUpdatesRule\x12\x19\n\x0cpubsub_topic\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1b\n\x0eschema_version\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x90\x02\n\x06\x46ilter\x12\x15\n\x08projects\x18\x01 \x03(\tB\x03\xe0\x41\x01\x12\x66\n\x16\x63redit_types_treatment\x18\x04 \x01(\x0e\x32\x41.google.cloud.billing.budgets.v1beta1.Filter.CreditTypesTreatmentB\x03\xe0\x41\x01\x12\x15\n\x08services\x18\x03 \x03(\tB\x03\xe0\x41\x01"p\n\x14\x43reditTypesTreatment\x12&\n"CREDIT_TYPES_TREATMENT_UNSPECIFIED\x10\x00\x12\x17\n\x13INCLUDE_ALL_CREDITS\x10\x01\x12\x17\n\x13\x45XCLUDE_ALL_CREDITS\x10\x02\x42y\n(com.google.cloud.billing.budgets.v1beta1P\x01ZKgoogle.golang.org/genproto/googleapis/cloud/billing/budgets/v1beta1;budgetsb\x06proto3' ), dependencies=[ google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_type_dot_money__pb2.DESCRIPTOR, ], ) _THRESHOLDRULE_BASIS = _descriptor.EnumDescriptor( name="Basis", full_name="google.cloud.billing.budgets.v1beta1.ThresholdRule.Basis", filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name="BASIS_UNSPECIFIED", index=0, number=0, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( 
name="CURRENT_SPEND", index=1, number=1, serialized_options=None, type=None ), _descriptor.EnumValueDescriptor( name="FORECASTED_SPEND", index=2, number=2, serialized_options=None, type=None, ), ], containing_type=None, serialized_options=None, serialized_start=992, serialized_end=1063, ) _sym_db.RegisterEnumDescriptor(_THRESHOLDRULE_BASIS) _FILTER_CREDITTYPESTREATMENT = _descriptor.EnumDescriptor( name="CreditTypesTreatment", full_name="google.cloud.billing.budgets.v1beta1.Filter.CreditTypesTreatment", filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name="CREDIT_TYPES_TREATMENT_UNSPECIFIED", index=0, number=0, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="INCLUDE_ALL_CREDITS", index=1, number=1, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="EXCLUDE_ALL_CREDITS", index=2, number=2, serialized_options=None, type=None, ), ], containing_type=None, serialized_options=None, serialized_start=1300, serialized_end=1412, ) _sym_db.RegisterEnumDescriptor(_FILTER_CREDITTYPESTREATMENT) _BUDGET = _descriptor.Descriptor( name="Budget", full_name="google.cloud.billing.budgets.v1beta1.Budget", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="name", full_name="google.cloud.billing.budgets.v1beta1.Budget.name", index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="display_name", full_name="google.cloud.billing.budgets.v1beta1.Budget.display_name", index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( name="budget_filter", full_name="google.cloud.billing.budgets.v1beta1.Budget.budget_filter", index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="amount", full_name="google.cloud.billing.budgets.v1beta1.Budget.amount", index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="threshold_rules", full_name="google.cloud.billing.budgets.v1beta1.Budget.threshold_rules", index=4, number=5, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="all_updates_rule", full_name="google.cloud.billing.budgets.v1beta1.Budget.all_updates_rule", index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="etag", full_name="google.cloud.billing.budgets.v1beta1.Budget.etag", index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], serialized_options=_b( 
"\352AZ\n$billingbudgets.googleapis.com/Budget\0222billingAccounts/{billing_account}/budgets/{budget}" ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], serialized_start=189, serialized_end=667, ) _BUDGETAMOUNT = _descriptor.Descriptor( name="BudgetAmount", full_name="google.cloud.billing.budgets.v1beta1.BudgetAmount", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="specified_amount", full_name="google.cloud.billing.budgets.v1beta1.BudgetAmount.specified_amount", index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="last_period_amount", full_name="google.cloud.billing.budgets.v1beta1.BudgetAmount.last_period_amount", index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name="budget_amount", full_name="google.cloud.billing.budgets.v1beta1.BudgetAmount.budget_amount", index=0, containing_type=None, fields=[], ) ], serialized_start=670, serialized_end=835, ) _LASTPERIODAMOUNT = _descriptor.Descriptor( name="LastPeriodAmount", full_name="google.cloud.billing.budgets.v1beta1.LastPeriodAmount", filename=None, file=DESCRIPTOR, containing_type=None, fields=[], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], serialized_start=837, serialized_end=855, ) _THRESHOLDRULE = _descriptor.Descriptor( name="ThresholdRule", 
full_name="google.cloud.billing.budgets.v1beta1.ThresholdRule", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="threshold_percent", full_name="google.cloud.billing.budgets.v1beta1.ThresholdRule.threshold_percent", index=0, number=1, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="spend_basis", full_name="google.cloud.billing.budgets.v1beta1.ThresholdRule.spend_basis", index=1, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[_THRESHOLDRULE_BASIS], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], serialized_start=858, serialized_end=1063, ) _ALLUPDATESRULE = _descriptor.Descriptor( name="AllUpdatesRule", full_name="google.cloud.billing.budgets.v1beta1.AllUpdatesRule", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="pubsub_topic", full_name="google.cloud.billing.budgets.v1beta1.AllUpdatesRule.pubsub_topic", index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="schema_version", full_name="google.cloud.billing.budgets.v1beta1.AllUpdatesRule.schema_version", index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode("utf-8"), message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], serialized_start=1065, serialized_end=1137, ) _FILTER = _descriptor.Descriptor( name="Filter", full_name="google.cloud.billing.budgets.v1beta1.Filter", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="projects", full_name="google.cloud.billing.budgets.v1beta1.Filter.projects", index=0, number=1, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="credit_types_treatment", full_name="google.cloud.billing.budgets.v1beta1.Filter.credit_types_treatment", index=1, number=4, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="services", full_name="google.cloud.billing.budgets.v1beta1.Filter.services", index=2, number=3, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[_FILTER_CREDITTYPESTREATMENT], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], serialized_start=1140, serialized_end=1412, ) _BUDGET.fields_by_name["budget_filter"].message_type = _FILTER _BUDGET.fields_by_name["amount"].message_type = _BUDGETAMOUNT _BUDGET.fields_by_name["threshold_rules"].message_type = _THRESHOLDRULE 
_BUDGET.fields_by_name["all_updates_rule"].message_type = _ALLUPDATESRULE _BUDGETAMOUNT.fields_by_name[ "specified_amount" ].message_type = google_dot_type_dot_money__pb2._MONEY _BUDGETAMOUNT.fields_by_name["last_period_amount"].message_type = _LASTPERIODAMOUNT _BUDGETAMOUNT.oneofs_by_name["budget_amount"].fields.append( _BUDGETAMOUNT.fields_by_name["specified_amount"] ) _BUDGETAMOUNT.fields_by_name[ "specified_amount" ].containing_oneof = _BUDGETAMOUNT.oneofs_by_name["budget_amount"] _BUDGETAMOUNT.oneofs_by_name["budget_amount"].fields.append( _BUDGETAMOUNT.fields_by_name["last_period_amount"] ) _BUDGETAMOUNT.fields_by_name[ "last_period_amount" ].containing_oneof = _BUDGETAMOUNT.oneofs_by_name["budget_amount"] _THRESHOLDRULE.fields_by_name["spend_basis"].enum_type = _THRESHOLDRULE_BASIS _THRESHOLDRULE_BASIS.containing_type = _THRESHOLDRULE _FILTER.fields_by_name[ "credit_types_treatment" ].enum_type = _FILTER_CREDITTYPESTREATMENT _FILTER_CREDITTYPESTREATMENT.containing_type = _FILTER DESCRIPTOR.message_types_by_name["Budget"] = _BUDGET DESCRIPTOR.message_types_by_name["BudgetAmount"] = _BUDGETAMOUNT DESCRIPTOR.message_types_by_name["LastPeriodAmount"] = _LASTPERIODAMOUNT DESCRIPTOR.message_types_by_name["ThresholdRule"] = _THRESHOLDRULE DESCRIPTOR.message_types_by_name["AllUpdatesRule"] = _ALLUPDATESRULE DESCRIPTOR.message_types_by_name["Filter"] = _FILTER _sym_db.RegisterFileDescriptor(DESCRIPTOR) Budget = _reflection.GeneratedProtocolMessageType( "Budget", (_message.Message,), dict( DESCRIPTOR=_BUDGET, __module__="google.cloud.billing.budgets_v1beta1.proto.budget_model_pb2", __doc__="""A budget is a plan that describes what you expect to spend on Cloud projects, plus the rules to execute as spend is tracked against that plan, (for example, send an alert when 90% of the target spend is met). Currently all plans are monthly budgets so the usage period(s) tracked are implied (calendar months of usage back-to-back). Attributes: name: Output only. 
Resource name of the budget. The resource name implies the scope of a budget. Values are of the form ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. display_name: User data for display name in UI. Validation: <= 60 chars. budget_filter: Optional. Filters that define which resources are used to compute the actual spend against the budget. amount: Required. Budgeted amount. threshold_rules: Required. Rules that trigger alerts (notifications of thresholds being crossed) when spend exceeds the specified percentages of the budget. all_updates_rule: Optional. Rules to apply to all updates to the actual spend, regardless of the thresholds set in ``threshold_rules``. etag: Optional. Etag to validate that the object is unchanged for a read-modify-write operation. An empty etag will cause an update to overwrite other changes. """, # @@protoc_insertion_point(class_scope:google.cloud.billing.budgets.v1beta1.Budget) ), ) _sym_db.RegisterMessage(Budget) BudgetAmount = _reflection.GeneratedProtocolMessageType( "BudgetAmount", (_message.Message,), dict( DESCRIPTOR=_BUDGETAMOUNT, __module__="google.cloud.billing.budgets_v1beta1.proto.budget_model_pb2", __doc__="""The budgeted amount for each usage period. Attributes: budget_amount: Specification for what amount to use as the budget. specified_amount: A specified amount to use as the budget. ``currency_code`` is optional. If specified, it must match the currency of the billing account. The ``currency_code`` is provided on output. last_period_amount: Use the last period's actual spend as the budget for the present period. 
""", # @@protoc_insertion_point(class_scope:google.cloud.billing.budgets.v1beta1.BudgetAmount) ), ) _sym_db.RegisterMessage(BudgetAmount) LastPeriodAmount = _reflection.GeneratedProtocolMessageType( "LastPeriodAmount", (_message.Message,), dict( DESCRIPTOR=_LASTPERIODAMOUNT, __module__="google.cloud.billing.budgets_v1beta1.proto.budget_model_pb2", __doc__="""Describes a budget amount targeted to last period's spend. At this time, the amount is automatically 100% of last period's spend; that is, there are no other options yet. Future configuration will be described here (for example, configuring a percentage of last period's spend). """, # @@protoc_insertion_point(class_scope:google.cloud.billing.budgets.v1beta1.LastPeriodAmount) ), ) _sym_db.RegisterMessage(LastPeriodAmount) ThresholdRule = _reflection.GeneratedProtocolMessageType( "ThresholdRule", (_message.Message,), dict( DESCRIPTOR=_THRESHOLDRULE, __module__="google.cloud.billing.budgets_v1beta1.proto.budget_model_pb2", __doc__="""ThresholdRule contains a definition of a threshold which triggers an alert (a notification of a threshold being crossed) to be sent when spend goes above the specified amount. Alerts are automatically e-mailed to users with the Billing Account Administrator role or the Billing Account User role. The thresholds here have no effect on notifications sent to anything configured under ``Budget.all_updates_rule``. Attributes: threshold_percent: Required. Send an alert when this threshold is exceeded. This is a 1.0-based percentage, so 0.5 = 50%. Validation: non- negative number. spend_basis: Optional. The type of basis used to determine if spend has passed the threshold. Behavior defaults to CURRENT\_SPEND if not set. 
""", # @@protoc_insertion_point(class_scope:google.cloud.billing.budgets.v1beta1.ThresholdRule) ), ) _sym_db.RegisterMessage(ThresholdRule) AllUpdatesRule = _reflection.GeneratedProtocolMessageType( "AllUpdatesRule", (_message.Message,), dict( DESCRIPTOR=_ALLUPDATESRULE, __module__="google.cloud.billing.budgets_v1beta1.proto.budget_model_pb2", __doc__="""AllUpdatesRule defines notifications that are sent on every update to the billing account's spend, regardless of the thresholds defined using threshold rules. Attributes: pubsub_topic: Required. The name of the Cloud Pub/Sub topic where budget related messages will be published, in the form ``projects/{project_id}/topics/{topic_id}``. Updates are sent at regular intervals to the topic. The topic needs to be created before the budget is created; see https://cloud.google.com/billing/docs/how-to/budgets#manage- notifications for more details. Caller is expected to have ``pubsub.topics.setIamPolicy`` permission on the topic when it's set for a budget, otherwise, the API call will fail with PERMISSION\_DENIED. See https://cloud.google.com/pubsub/docs/access-control for more details on Pub/Sub roles and permissions. schema_version: Required. The schema version of the notification. Only "1.0" is accepted. It represents the JSON schema as defined in https://cloud.google.com/billing/docs/how- to/budgets#notification\_format """, # @@protoc_insertion_point(class_scope:google.cloud.billing.budgets.v1beta1.AllUpdatesRule) ), ) _sym_db.RegisterMessage(AllUpdatesRule) Filter = _reflection.GeneratedProtocolMessageType( "Filter", (_message.Message,), dict( DESCRIPTOR=_FILTER, __module__="google.cloud.billing.budgets_v1beta1.proto.budget_model_pb2", __doc__="""A filter for a budget, limiting the scope of the cost to calculate. Attributes: projects: Optional. A set of projects of the form ``projects/{project_id}``, specifying that usage from only this set of projects should be included in the budget. 
If omitted, the report will include all usage for the billing account, regardless of which project the usage occurred on. Only zero or one project can be specified currently. credit_types_treatment: Optional. If not set, default behavior is ``INCLUDE_ALL_CREDITS``. services: Optional. A set of services of the form ``services/{service_id}``, specifying that usage from only this set of services should be included in the budget. If omitted, the report will include usage for all the services. The service names are available through the Catalog API: https://cloud.google.com/billing/v1/how-tos/catalog-api. """, # @@protoc_insertion_point(class_scope:google.cloud.billing.budgets.v1beta1.Filter) ), ) _sym_db.RegisterMessage(Filter) DESCRIPTOR._options = None _BUDGET.fields_by_name["name"]._options = None _BUDGET.fields_by_name["budget_filter"]._options = None _BUDGET.fields_by_name["amount"]._options = None _BUDGET.fields_by_name["threshold_rules"]._options = None _BUDGET.fields_by_name["all_updates_rule"]._options = None _BUDGET.fields_by_name["etag"]._options = None _BUDGET._options = None _THRESHOLDRULE.fields_by_name["threshold_percent"]._options = None _THRESHOLDRULE.fields_by_name["spend_basis"]._options = None _ALLUPDATESRULE.fields_by_name["pubsub_topic"]._options = None _ALLUPDATESRULE.fields_by_name["schema_version"]._options = None _FILTER.fields_by_name["projects"]._options = None _FILTER.fields_by_name["credit_types_treatment"]._options = None _FILTER.fields_by_name["services"]._options = None # @@protoc_insertion_point(module_scope)
36.849081
2,327
0.654795
3,182
28,079
5.52797
0.148649
0.028653
0.057248
0.072484
0.568221
0.523081
0.462592
0.433371
0.36697
0.319215
0
0.040337
0.246875
28,079
761
2,328
36.897503
0.79146
0.026497
0
0.594633
1
0.004237
0.394942
0.174981
0
0
0
0
0
1
0
false
0.001412
0.011299
0
0.011299
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
73592c3ecd42d5c4a472b3d8242eb4b399af73f6
1,000
py
Python
100-Exercicios/ex039.py
thedennerdev/ExerciciosPython-Iniciante
de36c4a09700353a9a1daa7f1320e416c6201a5c
[ "MIT" ]
null
null
null
100-Exercicios/ex039.py
thedennerdev/ExerciciosPython-Iniciante
de36c4a09700353a9a1daa7f1320e416c6201a5c
[ "MIT" ]
null
null
null
100-Exercicios/ex039.py
thedennerdev/ExerciciosPython-Iniciante
de36c4a09700353a9a1daa7f1320e416c6201a5c
[ "MIT" ]
null
null
null
# Exercise Python 39: read a birth year and report, based on the person's age,
# whether they still have to enlist for (Brazilian) military service at 18,
# are at the exact enlistment age, or are past the deadline — including how
# many years remain or have passed.
import datetime


def mensagens_alistamento(idade):
    """Return the list of message lines for a given age in years.

    idade < 18  -> not yet time to enlist (years remaining);
    idade == 18 -> exactly enlistment age;
    idade > 18  -> past the deadline (years overdue).
    """
    if idade < 18:
        return [
            'Ainda não está na hora de se alistar',
            f'Sua idade ainda é {idade} anos, faltam {18 - idade} anos. Aguarde mais um pouco!',
        ]
    if idade == 18:
        return [
            f'Sua idade já é {idade} anos',
            'Você está na idade de se alistar. Não perca tempo!',
        ]
    return [
        'Você passou do prazo de alistamento.',
        f'Sua idade é {idade} anos, já passou {idade - 18} anos. Regularize a situação!',
    ]


def main():
    """Prompt for the birth year and print the enlistment status."""
    current_year = datetime.datetime.today().year
    ano_nasc = int(input('Informe o ano de seu nascimento: '))
    for linha in mensagens_alistamento(current_year - ano_nasc):
        print(linha)


# Guarding the entry point keeps the module importable (and testable) without
# triggering the interactive prompt — the original ran input() at import time.
if __name__ == '__main__':
    main()
62.5
315
0.75
166
1,000
4.445783
0.415663
0.173442
0.108401
0.056911
0
0
0
0
0
0
0
0.012136
0.176
1,000
16
316
62.5
0.883495
0.314
0
0
0
0.153846
0.584548
0
0
0
0
0
0
1
0
false
0.153846
0.076923
0
0.076923
0.461538
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
1
0
2
73617c822f5af71e4276c1b4c85554260d13ae06
982
py
Python
news/pybo/migrations/0006_auto_20211010_0322.py
Smashh712/nrib
375c9625e9efa6bb9a6f466312de3c6fcd5818a4
[ "MIT" ]
null
null
null
news/pybo/migrations/0006_auto_20211010_0322.py
Smashh712/nrib
375c9625e9efa6bb9a6f466312de3c6fcd5818a4
[ "MIT" ]
null
null
null
news/pybo/migrations/0006_auto_20211010_0322.py
Smashh712/nrib
375c9625e9efa6bb9a6f466312de3c6fcd5818a4
[ "MIT" ]
null
null
null
# Generated by Django 3.2.7 on 2021-10-09 18:22

from django.db import migrations, models


def _representor_char_field():
    """Build a fresh CharField matching every representor column definition."""
    return models.CharField(default='', max_length=20, null=True)


class Migration(migrations.Migration):
    """Add the *_representor_id columns and relax the existing ones."""

    dependencies = [
        ('pybo', '0005_auto_20211010_0320'),
    ]

    operations = [
        migrations.AddField(
            model_name='issue',
            name='agree_representor_id',
            field=_representor_char_field(),
        ),
        migrations.AddField(
            model_name='issue',
            name='disagree_representor_id',
            field=_representor_char_field(),
        ),
        migrations.AlterField(
            model_name='issue',
            name='agree_representor',
            field=_representor_char_field(),
        ),
        migrations.AlterField(
            model_name='issue',
            name='disagree_representor',
            field=_representor_char_field(),
        ),
    ]
28.882353
73
0.580448
99
982
5.585859
0.434343
0.065099
0.101266
0.130199
0.734177
0.734177
0.575045
0.575045
0.575045
0.575045
0
0.056358
0.295316
982
33
74
29.757576
0.742775
0.045825
0
0.592593
1
0
0.135829
0.049198
0
0
0
0
0
1
0
false
0
0.037037
0
0.148148
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
7df69847b16a72c401c8d03768fb93c74d01b5c9
2,114
py
Python
morepath/tests/test_method_directive.py
DuncanBetts/morepath
acad10489b051df9c512f6735a9338854745a599
[ "BSD-3-Clause" ]
null
null
null
morepath/tests/test_method_directive.py
DuncanBetts/morepath
acad10489b051df9c512f6735a9338854745a599
[ "BSD-3-Clause" ]
null
null
null
morepath/tests/test_method_directive.py
DuncanBetts/morepath
acad10489b051df9c512f6735a9338854745a599
[ "BSD-3-Clause" ]
null
null
null
import morepath
from webtest import TestApp as Client


def test_implicit_function():
    # App declaring two dispatch methods with default implementations that an
    # application can override via @app.method.
    class app(morepath.App):
        @morepath.dispatch_method()
        def one(self):
            return "Default one"

        @morepath.dispatch_method()
        def two(self):
            return "Default two"

    @app.path(path='')
    class Model(object):
        def __init__(self):
            pass

    # one() delegates to two(); two() is also overridden below, so the
    # delegation must pick up the override, not the default.
    @app.method(app.one)
    def one_impl(self):
        return self.two()

    @app.method(app.two)
    def two_impl(self):
        return "The real two"

    @app.view(model=Model)
    def default(self, request):
        return request.app.one()

    c = Client(app())

    response = c.get('/')
    # one() -> two() -> overridden implementation.
    assert response.body == b'The real two'


def test_implicit_function_mounted():
    # Base app with two overridable dispatch methods; alpha mounts beta, and
    # only beta overrides them — alpha keeps the defaults.
    class base(morepath.App):
        @morepath.dispatch_method()
        def one(self):
            return "Default one"

        @morepath.dispatch_method()
        def two(self):
            return "Default two"

    class alpha(base):
        pass

    class beta(base):
        def __init__(self, id):
            self.id = id

    @alpha.mount(path='mounted/{id}', app=beta)
    def mount_beta(id):
        return beta(id=id)

    class AlphaRoot(object):
        pass

    class Root(object):
        def __init__(self, id):
            self.id = id

    @alpha.path(path='/', model=AlphaRoot)
    def get_alpha_root():
        return AlphaRoot()

    @beta.path(path='/', model=Root)
    def get_root(app):
        return Root(app.id)

    # Overrides are registered on beta only.
    @beta.method(base.one)
    def one_impl(self):
        return self.two()

    @beta.method(base.two)
    def two_impl(self):
        return "The real two"

    @alpha.view(model=AlphaRoot)
    def alpha_default(self, request):
        return request.app.one()

    @beta.view(model=Root)
    def default(self, request):
        return "View for %s, message: %s" % (self.id, request.app.one())

    c = Client(alpha())

    # Inside the mounted beta app the overridden methods apply.
    response = c.get('/mounted/1')
    assert response.body == b'View for 1, message: The real two'

    # The mounting alpha app still sees the base defaults.
    response = c.get('/')
    assert response.body == b'Default one'
21.793814
72
0.580889
269
2,114
4.453532
0.178439
0.066778
0.073456
0.083472
0.488314
0.440735
0.440735
0.327212
0.233723
0.178631
0
0.001329
0.288079
2,114
96
73
22.020833
0.794684
0
0
0.471429
0
0
0.082308
0
0
0
0
0
0.042857
1
0.271429
false
0.042857
0.028571
0.2
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
7dfb15185b5928b42e0c69caa80b31116a8fea1a
1,715
py
Python
saleor/order/migrations/0072_django_price_2.py
elwoodxblues/saleor
5e4e4a4259a011d24b04ebd24c77c689de843fa1
[ "CC-BY-4.0" ]
19
2019-12-03T17:28:07.000Z
2021-09-10T21:30:52.000Z
saleor/order/migrations/0072_django_price_2.py
elwoodxblues/saleor
5e4e4a4259a011d24b04ebd24c77c689de843fa1
[ "CC-BY-4.0" ]
51
2019-12-06T08:06:07.000Z
2021-05-06T02:10:50.000Z
saleor/order/migrations/0072_django_price_2.py
elwoodxblues/saleor
5e4e4a4259a011d24b04ebd24c77c689de843fa1
[ "CC-BY-4.0" ]
20
2020-02-03T00:38:59.000Z
2022-01-03T13:07:52.000Z
# Generated by Django 2.2.4 on 2019-08-14 09:13

from django.conf import settings
from django.db import migrations, models

# (model, old column, new column) triples for the django-prices rename pass.
_FIELD_RENAMES = (
    ("order", "shipping_price_gross", "shipping_price_gross_amount"),
    ("order", "shipping_price_net", "shipping_price_net_amount"),
    ("order", "total_gross", "total_gross_amount"),
    ("order", "total_net", "total_net_amount"),
    ("orderline", "unit_price_gross", "unit_price_gross_amount"),
    ("orderline", "unit_price_net", "unit_price_net_amount"),
)


def _currency_field():
    """Fresh CharField holding the order/line currency code."""
    return models.CharField(
        default=settings.DEFAULT_CURRENCY,
        max_length=settings.DEFAULT_CURRENCY_CODE_LENGTH,
    )


class Migration(migrations.Migration):
    """Rename price columns to *_amount and add explicit currency columns."""

    dependencies = [("order", "0071_order_gift_cards")]

    operations = [
        migrations.RenameField(model_name=model, old_name=old, new_name=new)
        for model, old, new in _FIELD_RENAMES
    ] + [
        migrations.AddField(
            model_name="order", name="currency", field=_currency_field()
        ),
        migrations.AddField(
            model_name="orderline", name="currency", field=_currency_field()
        ),
    ]
31.181818
85
0.58484
168
1,715
5.619048
0.279762
0.076271
0.165254
0.190678
0.617585
0.617585
0.611229
0.595339
0.595339
0.349576
0
0.016253
0.318367
1,715
54
86
31.759259
0.791275
0.026239
0
0.666667
1
0
0.18705
0.070144
0
0
0
0
0
1
0
false
0
0.041667
0
0.104167
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
b40ee079a577a77555888197b34380d7e63acfd3
517
py
Python
src/waldur_mastermind/notifications/migrations/0002_json_field.py
opennode/nodeconductor-assembly-waldur
cad9966389dc9b52b13d2301940c99cf4b243900
[ "MIT" ]
2
2017-01-20T15:26:25.000Z
2017-08-03T04:38:08.000Z
src/waldur_mastermind/notifications/migrations/0002_json_field.py
opennode/nodeconductor-assembly-waldur
cad9966389dc9b52b13d2301940c99cf4b243900
[ "MIT" ]
null
null
null
src/waldur_mastermind/notifications/migrations/0002_json_field.py
opennode/nodeconductor-assembly-waldur
cad9966389dc9b52b13d2301940c99cf4b243900
[ "MIT" ]
null
null
null
# Generated by Django 3.2 on 2022-01-31 14:26

from django.db import migrations, models


def _as_json_field(column):
    """AlterField operation switching *column* on Notification to JSONField."""
    return migrations.AlterField(
        model_name='notification',
        name=column,
        field=models.JSONField(),
    )


class Migration(migrations.Migration):
    """Convert Notification.emails and Notification.query to JSON columns."""

    dependencies = [
        ('notifications', '0001_initial'),
    ]

    operations = [
        _as_json_field('emails'),
        _as_json_field('query'),
    ]
21.541667
45
0.560928
46
517
6.23913
0.673913
0.139373
0.174216
0.202091
0.313589
0.313589
0
0
0
0
0
0.051576
0.324952
517
23
46
22.478261
0.770774
0.083172
0
0.470588
1
0
0.127119
0
0
0
0
0
0
1
0
false
0
0.058824
0
0.235294
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
b4148b82caffcb3d401203b514031ef55ddaf4b5
1,279
py
Python
CodigoSOLID.py
JhonGalarza/SOLID
5ee45f136a24cb2300837c8ed89accdc2f299c83
[ "MIT" ]
null
null
null
CodigoSOLID.py
JhonGalarza/SOLID
5ee45f136a24cb2300837c8ed89accdc2f299c83
[ "MIT" ]
null
null
null
CodigoSOLID.py
JhonGalarza/SOLID
5ee45f136a24cb2300837c8ed89accdc2f299c83
[ "MIT" ]
null
null
null
#DATOS DE ENTRADA ANIMAL= int(input("¿De cual animal quiere conocer la caracteristicas? 1.Leon 2.Ballena 3.Tucan? ")) class Animal: def __init__(self, ANIMAL): self.ANIMAL = ANIMAL def acciones_comun(): comun = "Comer" return comun def sentido_vista(): vista = "Puede ver" return vista class Animal_Tierra: def acciones_Tierra(): Tierra = "camina en cuatro patas" return Tierra class Animal_Agua: def acciones_Agua(): return "Nada bajo el agua" class Animal_Aire (Animal): def acciones_Aire(): return "Vuela" class Leon (Animal, Animal_Tierra): def llamar(): caracteristicas = () return caracteristicas class Ballena(Animal, Animal_Agua): def llamar(): caracteristicas = () return caracteristicas class Tucan(Animal, Animal_Aire): def llamar(): caracteristicas = () return caracteristicas if ANIMAL == 1 : print ("debe imprimir las caracteristicas del leon, el leon es clase hija de animal y debe agragar animal_tierra" ) elif ANIMAL == 2 : print ("lo mismo que el leon, pero con la ballena") elif ANIMAL == 3 : print("Lo mismo pero con el tucan")
24.596154
120
0.620797
152
1,279
5.125
0.388158
0.056483
0.092426
0.115533
0.186136
0.12837
0
0
0
0
0
0.006659
0.295543
1,279
52
121
24.596154
0.856826
0.01251
0
0.236842
0
0
0.252475
0
0
0
0
0
0
1
0.236842
false
0
0
0.052632
0.631579
0.078947
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
b45613d6f68d15fc7e95c155cc5ae42470c0d2ab
694
py
Python
backtest/tests/test_strategy.py
Christakou/backtest
fa97f50b36a1d56fe667250169ed50a8d9121c3c
[ "MIT" ]
null
null
null
backtest/tests/test_strategy.py
Christakou/backtest
fa97f50b36a1d56fe667250169ed50a8d9121c3c
[ "MIT" ]
null
null
null
backtest/tests/test_strategy.py
Christakou/backtest
fa97f50b36a1d56fe667250169ed50a8d9121c3c
[ "MIT" ]
null
null
null
import pytest

from backtest.strategy import BuyAndHoldEqualAllocation


@pytest.fixture
def strategy():
    """Buy-and-hold strategy over a fixed AAPL/GOOG pair."""
    return BuyAndHoldEqualAllocation(relevant_symbols=('AAPL', 'GOOG'))


def test_strategy_execute(strategy):
    strategy.execute()
    # A completed run must have produced holdings and trades.
    assert len(strategy.holdings) > 0
    assert len(strategy.trades) > 0


def test_holdings_at(strategy):
    strategy.execute()
    # Snapshot of expected holdings per date: empty before the first trade,
    # then the recorded allocations.
    expected = {
        '2018-05-05': {},
        '2021-05-06': {'AAPL': 7466},
        '2021-05-07': {'AAPL': 3862, 'GOOG': 209},
        '2021-05-08': {'AAPL': 3862, 'GOOG': 209},
    }
    for day, holdings in expected.items():
        assert strategy._holdings_at(day) == holdings
33.047619
79
0.698847
83
694
5.686747
0.361446
0.169492
0.186441
0.20339
0.190678
0.190678
0
0
0
0
0
0.087838
0.146974
694
21
80
33.047619
0.709459
0
0
0.117647
0
0
0.097842
0
0
0
0
0
0.352941
1
0.176471
false
0
0.117647
0
0.352941
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
b457086f7edbfe9899dbbd18e732ac94f61af7c3
2,798
py
Python
src/api/fundings/entities.py
cbn-alpin/gefiproj-api
35e3f00dd71bcdd9ad751307ac379aa78d1545cf
[ "MIT" ]
2
2020-10-15T15:16:08.000Z
2020-11-06T10:41:13.000Z
src/api/fundings/entities.py
cbn-alpin/gefiproj-api
35e3f00dd71bcdd9ad751307ac379aa78d1545cf
[ "MIT" ]
1
2020-11-14T19:40:14.000Z
2020-11-14T19:40:14.000Z
src/api/fundings/entities.py
cbn-alpin/gefiproj-api
35e3f00dd71bcdd9ad751307ac379aa78d1545cf
[ "MIT" ]
null
null
null
from marshmallow import Schema, fields, validate
from sqlalchemy import Column, String, Integer, Float, Date, ForeignKey
from sqlalchemy.orm import relationship

from ..funders.entities import Funder, FunderSchema
from src.api import db
from src.shared.entity import Base


class Funding(Base, db.Model):
    """ORM mapping for the 'financement' table (one funding of a project)."""

    __tablename__ = 'financement'

    id_f = Column(Integer, primary_key=True)
    id_p = Column(Integer, nullable=False)
    id_financeur = Column(Integer, ForeignKey('financeur.id_financeur'), nullable=False)
    financeur = relationship("Funder")
    montant_arrete_f = Column(Float, nullable=False)
    statut_f = Column(String(250), nullable=False)
    date_solde_f = Column(Date)
    date_arrete_f = Column(Date)
    date_limite_solde_f = Column(Date)
    commentaire_admin_f = Column(String(250))
    commentaire_resp_f = Column(String(250))
    numero_titre_f = Column(String(250))
    annee_titre_f = Column(String(250))
    imputation_f = Column(String(250))

    def __init__(self, id_p, id_financeur, montant_arrete_f, statut_f, date_solde_f=None,
                 date_arrete_f=None, date_limite_solde_f=None, commentaire_admin_f='',
                 commentaire_resp_f='', numero_titre_f='', annee_titre_f='',
                 imputation_f='', id_f=''):
        # The primary key is only assigned when a real id was supplied;
        # '' means "let the database generate one".
        if id_f != '':
            self.id_f = id_f
        for attr, value in (
            ('id_p', id_p),
            ('id_financeur', id_financeur),
            ('montant_arrete_f', montant_arrete_f),
            ('statut_f', statut_f),
            ('date_solde_f', date_solde_f),
            ('date_arrete_f', date_arrete_f),
            ('date_limite_solde_f', date_limite_solde_f),
            ('commentaire_admin_f', commentaire_admin_f),
            ('commentaire_resp_f', commentaire_resp_f),
            ('numero_titre_f', numero_titre_f),
            ('annee_titre_f', annee_titre_f),
            ('imputation_f', imputation_f),
        ):
            setattr(self, attr, value)


class FundingSchema(Schema):
    """Marshmallow (de)serialization schema for Funding."""

    id_f = fields.Integer()
    id_p = fields.Integer(required=True)
    id_financeur = fields.Integer(required=True)
    financeur = fields.Nested(FunderSchema)
    montant_arrete_f = fields.Float(required=True)
    statut_f = fields.Str(validate=validate.OneOf(["ANTR", "ATR", "SOLDE"]), required=True)
    date_solde_f = fields.Date(allow_none=True)
    date_arrete_f = fields.Date(allow_none=True)
    date_limite_solde_f = fields.Date(allow_none=True)
    commentaire_admin_f = fields.Str(allow_none=True)
    commentaire_resp_f = fields.Str(allow_none=True)
    numero_titre_f = fields.Str(allow_none=True)
    annee_titre_f = fields.Str(allow_none=True)
    imputation_f = fields.Str(allow_none=True)
    # TODO find solution to replace because option unknown=INCLUDE don't work in a list
    difference = fields.Float(allow_none=True)
    solde = fields.Float(allow_none=True)
    nom_financeur = fields.Str(allow_none=True)
43.046154
111
0.723016
396
2,798
4.772727
0.207071
0.040741
0.075661
0.050794
0.191005
0.113757
0.077249
0
0
0
0
0.00786
0.181558
2,798
65
112
43.046154
0.817467
0.028949
0
0
0
0
0.018778
0.0081
0
0
0
0.015385
0
1
0.017544
false
0
0.105263
0
0.719298
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
b4605592d622fde2874b27f05a1e575beae84ca9
136
py
Python
Legacy/Audit_Sweep/daily_audit_cron.py
QualiSystemsLab/Power-Management
f90f5971d80f17f45c8ac3f43ff93c0071572dd0
[ "Apache-2.0" ]
null
null
null
Legacy/Audit_Sweep/daily_audit_cron.py
QualiSystemsLab/Power-Management
f90f5971d80f17f45c8ac3f43ff93c0071572dd0
[ "Apache-2.0" ]
null
null
null
Legacy/Audit_Sweep/daily_audit_cron.py
QualiSystemsLab/Power-Management
f90f5971d80f17f45c8ac3f43ff93c0071572dd0
[ "Apache-2.0" ]
null
null
null
from power_audit import PowerAudit


def main():
    """Run a full power audit against the local system."""
    auditor = PowerAudit()
    auditor.full_audit()


if __name__ == '__main__':
    main()
13.6
34
0.661765
16
136
5
0.6875
0
0
0
0
0
0
0
0
0
0
0
0.220588
136
9
35
15.111111
0.754717
0
0
0
0
0
0.058824
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
b46a7333f1fb95d927bac95badb76d52d1539743
1,691
py
Python
boilerplate_app/serializers.py
taher-systango/DjangoUnboxed
808ab771a44564458b897b6ec854c08f43cccf2a
[ "MIT" ]
null
null
null
boilerplate_app/serializers.py
taher-systango/DjangoUnboxed
808ab771a44564458b897b6ec854c08f43cccf2a
[ "MIT" ]
null
null
null
boilerplate_app/serializers.py
taher-systango/DjangoUnboxed
808ab771a44564458b897b6ec854c08f43cccf2a
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# Python imports.
import logging
import datetime
import calendar

# Django imports.
from django.db import transaction

# Rest Framework imports.
from rest_framework import serializers

# Third Party Library imports

# local imports.
from boilerplate_app.models import User, Projects


class UserCreateSerializer(serializers.ModelSerializer):
    """Registers a new user; the password is write-only and never echoed back."""

    password = serializers.CharField(write_only=True)

    def validate(self, data, *args, **kwargs):
        # No extra validation beyond what ModelSerializer already performs.
        return super(UserCreateSerializer, self).validate(data, *args, **kwargs)

    @transaction.atomic()
    def create(self, validated_data):
        """Create the user, then hash and persist the raw password."""
        new_user = super(UserCreateSerializer, self).create(validated_data)
        new_user.set_password(validated_data['password'])
        new_user.save()
        return new_user

    class Meta:
        model = User
        fields = ('email', 'id', 'password', 'username', 'first_name', 'last_name', 'role')
        extra_kwargs = {'password': {'write_only': True}}


class UserListSerializer(serializers.ModelSerializer):
    """Read-only listing of users."""

    class Meta:
        model = User
        fields = ('id', 'first_name', 'last_name', 'email', 'role')


class ProjectsCreateSerializer(serializers.ModelSerializer):
    """Creates a project owned by an existing user (identified by primary key)."""

    class Meta:
        model = Projects
        fields = ('project_name', 'user')

    def create(self, validated_data):
        # Swap the incoming user pk for the actual User instance.
        owner = User.objects.get(pk=validated_data.pop('user'))
        return Projects.objects.create(**validated_data, user=owner)


class ProjectsListSerializer(serializers.ModelSerializer):
    """Read-only listing of projects."""

    class Meta:
        model = Projects
        fields = ('id', 'project_name', 'user')
25.621212
91
0.686576
182
1,691
6.252747
0.406593
0.068541
0.049209
0.092267
0.205624
0.094903
0.094903
0
0
0
0
0.000738
0.198699
1,691
66
92
25.621212
0.839114
0.094619
0
0.277778
0
0
0.091864
0
0
0
0
0
0
1
0.083333
false
0.111111
0.194444
0.027778
0.611111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
b4723dbed92e900ca2787cc70a21f796a78c3f12
2,307
py
Python
tests/conftest.py
arosen93/jobflow
fbd5868394c6f4f6b4f2e0ccf4b7ff7d21fe7258
[ "BSD-3-Clause-LBNL" ]
10
2021-11-13T07:43:27.000Z
2022-03-14T11:05:15.000Z
tests/conftest.py
arosen93/jobflow
fbd5868394c6f4f6b4f2e0ccf4b7ff7d21fe7258
[ "BSD-3-Clause-LBNL" ]
69
2021-08-31T13:15:54.000Z
2022-03-31T21:43:56.000Z
tests/conftest.py
arosen93/jobflow
fbd5868394c6f4f6b4f2e0ccf4b7ff7d21fe7258
[ "BSD-3-Clause-LBNL" ]
5
2021-10-17T03:52:57.000Z
2022-03-31T00:17:20.000Z
import pytest


@pytest.fixture(scope="session")
def test_data():
    """Resolved path to the tests' static data directory."""
    from pathlib import Path

    return (Path(__file__).resolve().parent / "test_data").resolve()


@pytest.fixture(scope="session")
def database():
    """Name of the MongoDB database used for the test session."""
    return "jobflow_test"


@pytest.fixture(scope="session")
def mongo_jobstore(database):
    """JobStore backed by a real MongoDB 'outputs' collection."""
    from maggma.stores import MongoStore

    from jobflow import JobStore

    jobstore = JobStore(MongoStore(database, "outputs"))
    jobstore.connect()
    return jobstore


@pytest.fixture(scope="function")
def memory_jobstore():
    """Fresh in-memory JobStore for each test."""
    from maggma.stores import MemoryStore

    from jobflow import JobStore

    jobstore = JobStore(MemoryStore())
    jobstore.connect()
    return jobstore


@pytest.fixture(scope="function")
def memory_data_jobstore():
    """In-memory JobStore with an additional 'data' store attached."""
    from maggma.stores import MemoryStore

    from jobflow import JobStore

    jobstore = JobStore(MemoryStore(), additional_stores={"data": MemoryStore()})
    jobstore.connect()
    return jobstore


@pytest.fixture
def clean_dir():
    """Run the test inside a throwaway temporary working directory."""
    import os
    import shutil
    import tempfile

    previous_cwd = os.getcwd()
    scratch = tempfile.mkdtemp()
    os.chdir(scratch)
    yield
    os.chdir(previous_cwd)
    shutil.rmtree(scratch)


@pytest.fixture(scope="session")
def debug_mode():
    """When True, leave the LaunchPad contents in place after the session."""
    return False


@pytest.fixture(scope="session")
def lpad(database, debug_mode):
    """Session-wide FireWorks LaunchPad, wiped before and (unless debugging) after."""
    from fireworks import LaunchPad

    launchpad = LaunchPad(name=database)
    launchpad.reset("", require_password=False)
    yield launchpad

    if not debug_mode:
        launchpad.reset("", require_password=False)
        for collection in launchpad.db.list_collection_names():
            launchpad.db[collection].drop()


def _block_import_of(missing, monkeypatch):
    """Patch builtins.__import__ so that importing *missing* raises ImportError."""
    import builtins

    real_import = builtins.__import__

    def fake_import(name, *args, **kwargs):
        if name == missing:
            raise ImportError()
        return real_import(name, *args, **kwargs)

    monkeypatch.setattr(builtins, "__import__", fake_import)


@pytest.fixture
def no_pydot(monkeypatch):
    """Simulate an environment where pydot is not installed."""
    _block_import_of("pydot", monkeypatch)


@pytest.fixture
def no_matplotlib(monkeypatch):
    """Simulate an environment where matplotlib is not installed."""
    _block_import_of("matplotlib", monkeypatch)
20.236842
78
0.686606
268
2,307
5.716418
0.279851
0.084856
0.082245
0.081593
0.605091
0.475849
0.451044
0.420366
0.420366
0.420366
0
0
0.205028
2,307
113
79
20.415929
0.835333
0
0
0.479452
0
0
0.051149
0
0
0
0
0
0
1
0.164384
false
0.027397
0.328767
0.027397
0.60274
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
b48ab31d2b65a280be63bfbc1d367523dc945d6a
141
py
Python
Python/Least_Common_Multiple_for_large_numbers.py
DeathcallXD/DS-Algo-Point
70bc4b98fa6648cdcb2e65bccaa8b30298f14d87
[ "MIT" ]
null
null
null
Python/Least_Common_Multiple_for_large_numbers.py
DeathcallXD/DS-Algo-Point
70bc4b98fa6648cdcb2e65bccaa8b30298f14d87
[ "MIT" ]
null
null
null
Python/Least_Common_Multiple_for_large_numbers.py
DeathcallXD/DS-Algo-Point
70bc4b98fa6648cdcb2e65bccaa8b30298f14d87
[ "MIT" ]
null
null
null
def GCD(a, b):
    """Return the greatest common divisor of a and b (Euclid's algorithm)."""
    if b == 0:
        return a
    return GCD(b, a % b)


def main():
    """Read two integers and print their least common multiple."""
    a = int(input())
    b = int(input())
    # lcm(a, b) == a*b // gcd(a, b); floor division keeps the result an int,
    # which matters for the large numbers this exercise targets.
    print(a * b // GCD(a, b))


# Guarding the entry point keeps GCD importable (and testable) without
# blocking on stdin — the original called input() at import time.
if __name__ == "__main__":
    main()
14.1
26
0.460993
27
141
2.407407
0.407407
0.123077
0.153846
0
0
0
0
0
0
0
0
0.010309
0.312057
141
9
27
15.666667
0.659794
0
0
0
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0
0
0.375
0.125
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
81f67189d5d5520037d53dd853b5b30e2b4a3514
23,097
py
Python
test_fiona_issue383.py
thomasaarholt/fiona-wheels
50ff76691ea4286052d552ffb67d83894742b00a
[ "MIT" ]
null
null
null
test_fiona_issue383.py
thomasaarholt/fiona-wheels
50ff76691ea4286052d552ffb67d83894742b00a
[ "MIT" ]
null
null
null
test_fiona_issue383.py
thomasaarholt/fiona-wheels
50ff76691ea4286052d552ffb67d83894742b00a
[ "MIT" ]
null
null
null
import fiona d = { "type": "Feature", "id": "0", "properties": { "ADMINFORES": "99081600010343", "REGION": "08", "FORESTNUMB": "16", "FORESTORGC": "0816", "FORESTNAME": "El Yunque National Forest", "GIS_ACRES": 55829.81, "SHAPE_AREA": 0.0193062316937, "SHAPE_LEN": 0.754287568301, }, "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [-65.73293016000002, 18.33284838999998], [-65.73293445000002, 18.331367639999996], [-65.73189660000003, 18.331369719999998], [-65.73040952000002, 18.33137273], [-65.72620770999998, 18.33138113000001], [-65.72303074000001, 18.331387389999975], [-65.71763471000003, 18.331393549999973], [-65.71717587, 18.331394069999988], [-65.71297922999997, 18.331403290000026], [-65.71248787000002, 18.33140437999998], [-65.70898332000002, 18.33141236], [-65.70846269999998, 18.331413540000028], [-65.70470655999998, 18.331422009999983], [-65.70340513999997, 18.33142491000001], [-65.70268779000003, 18.331419400000016], [-65.70098910000002, 18.33140635000001], [-65.69978839999999, 18.33139711000001], [-65.69977925, 18.32948927000001], [-65.69976860000003, 18.32723274], [-65.69976336000002, 18.326155840000013], [-65.69975882, 18.32519180999998], [-65.69975420999998, 18.324281380000002], [-65.69975116, 18.323670390000018], [-65.69974878, 18.323214399999983], [-65.69972460999998, 18.317907339999977], [-65.69972661000003, 18.31559458999999], [-65.69972832000002, 18.314692869999988], [-65.69972934999998, 18.312400700000012], [-65.69973214999999, 18.309193600000015], [-65.69973189000001, 18.308128119999992], [-65.69971594999998, 18.304170699999986], [-65.69971009, 18.302713270000027], [-65.69969680999998, 18.29942688], [-65.69968705999997, 18.297028839999996], [-65.69968439000002, 18.294420890000026], [-65.69968401, 18.294158770000024], [-65.69968397000002, 18.29406161000003], [-65.69968146999997, 18.29031968999999], [-65.69967542, 18.286261500000023], [-65.6996757, 18.286123120000013], [-65.69967338999999, 18.284205750000012], [-65.69967251000003, 
18.283497660000023], [-65.69967014000002, 18.281735219999973], [-65.69967000000003, 18.28134633000002], [-65.69994827, 18.28134559], [-65.70099542999998, 18.28134276999998], [-65.70358926, 18.28133575999999], [-65.70616948000003, 18.281328770000016], [-65.70911901, 18.28132070999999], [-65.70971071999998, 18.28131909000001], [-65.71624101999998, 18.28131652000002], [-65.71624542, 18.276418089999993], [-65.71624548, 18.27636744], [-65.71624578000001, 18.275968209999974], [-65.71624845000002, 18.27300660999998], [-65.71624307000002, 18.271180739999977], [-65.71623899999997, 18.26979332000002], [-65.71623254999997, 18.267581380000024], [-65.71623254999997, 18.267578500000013], [-65.71623402, 18.267040029999976], [-65.71623762000002, 18.265657929999975], [-65.71623955000001, 18.26496930000002], [-65.71624981999997, 18.260115170000006], [-65.71625891999997, 18.257678180000028], [-65.71625689000001, 18.25766888999999], [-65.71628033000002, 18.252014929999973], [-65.71628700000002, 18.250603020000028], [-65.71629617000002, 18.248364939999988], [-65.71629643, 18.248011659999975], [-65.71974196999997, 18.248007089999987], [-65.72038055000002, 18.24800706000002], [-65.72076942000001, 18.24800829999998], [-65.72464429000001, 18.248011910000002], [-65.72465315, 18.248011519999977], [-65.72509256000001, 18.24801222000002], [-65.72707300000002, 18.24801083], [-65.73231042999998, 18.2480104], [-65.73397174000002, 18.248009190000005], [-65.73705114, 18.248008589999984], [-65.73750502000001, 18.248008190000007], [-65.73889711999999, 18.24800842000002], [-65.73978022, 18.248008830000003], [-65.74408667, 18.248010669999985], [-65.74502591999999, 18.248009980000006], [-65.74623288999999, 18.248009120000006], [-65.74772324000003, 18.248009149999973], [-65.74924592000002, 18.248014580000017], [-65.74961603999998, 18.248013990000004], [-65.74961524000003, 18.244120570000007], [-65.74961268999999, 18.243257019999987], [-65.74961502999997, 18.235669789999974], [-65.74961267999998, 
18.235211540000023], [-65.74961048, 18.234789499999977], [-65.74961128000001, 18.231243000000006], [-65.75090724, 18.231235679999998], [-65.75247086000002, 18.231236500000023], [-65.75309636999998, 18.231236850000016], [-65.75896512000003, 18.231239829999993], [-65.76053288000003, 18.231240590000027], [-65.76145975999998, 18.231241049999994], [-65.76266423999999, 18.23124161999999], [-65.76402088999998, 18.231242259999988], [-65.76422652999997, 18.231242339999994], [-65.76459129, 18.231242520000023], [-65.76506522, 18.231243529999972], [-65.76575971, 18.231245], [-65.77265518000002, 18.231259480000006], [-65.77609515, 18.23126751000001], [-65.77853763000002, 18.231273129999977], [-65.78301661, 18.231283440000027], [-65.78536026, 18.231288749999976], [-65.78565572000002, 18.231289430000004], [-65.78587555000001, 18.23129019999999], [-65.78745778000001, 18.23129352000001], [-65.79147775000001, 18.231303949999983], [-65.80175496999999, 18.23133021000001], [-65.80328739999999, 18.23133408000001], [-65.80925552999997, 18.23135074999999], [-65.81185003000002, 18.231357919999994], [-65.81302187, 18.231352949999973], [-65.81574820999998, 18.23134140000002], [-65.81705820000002, 18.231335829999978], [-65.81733358000002, 18.231334670000024], [-65.82028713, 18.231322050000017], [-65.82052381, 18.23132104000001], [-65.82337763999999, 18.23130882999999], [-65.82649563000001, 18.231295439999997], [-65.82811142999998, 18.231288459999973], [-65.83293057999998, 18.23127384999998], [-65.83292964999998, 18.231761140000003], [-65.83293025, 18.234220730000004], [-65.83292996, 18.23624890000002], [-65.83292955000002, 18.239821380000024], [-65.83292905000002, 18.244286690000024], [-65.83292845, 18.244807849999972], [-65.83292886999999, 18.245117160000007], [-65.83292883000001, 18.24573097000001], [-65.83292870999998, 18.247063589999982], [-65.83292857999999, 18.248008060000018], [-65.83315374, 18.248008760000005], [-65.83325909000001, 18.248009089999982], [-65.83590992, 
18.248030509999978], [-65.84442614, 18.248036909999996], [-65.84617400000002, 18.248038199999996], [-65.84807433999998, 18.24803958000001], [-65.84813063000001, 18.248039609999978], [-65.84903366999998, 18.248040240000023], [-65.85197088000001, 18.24804229], [-65.85535651999999, 18.24804193], [-65.85613706999999, 18.248041839999985], [-65.85719701, 18.248041699999987], [-65.8638446, 18.24804075999998], [-65.86544515000003, 18.24804051000001], [-65.87069150999997, 18.248039570000003], [-65.87385301, 18.248038310000027], [-65.87461352999998, 18.248020329999974], [-65.87817146999998, 18.248007959999995], [-65.88441703000001, 18.24800984000001], [-65.89088908999997, 18.248012580000022], [-65.89899125, 18.248013500000013], [-65.89925985999997, 18.24801395999998], [-65.90513017, 18.248014790000013], [-65.90874113000001, 18.248012710000012], [-65.91595359000002, 18.248011819999988], [-65.91629429, 18.248011819999988], [-65.9162887, 18.250010359999976], [-65.9162852, 18.25164811000002], [-65.91628292000001, 18.25191947000002], [-65.91627997, 18.253774229999976], [-65.91627848000002, 18.25477933000002], [-65.91627578999999, 18.255991100000017], [-65.91626445999998, 18.261137089999977], [-65.91625448000002, 18.26512563], [-65.91625524, 18.26536785000002], [-65.91625922999998, 18.266019389999997], [-65.91632637999999, 18.266198929999973], [-65.91632625, 18.266542049999998], [-65.91631202000002, 18.267959780000012], [-65.91631167000003, 18.267977850000022], [-65.91630744000003, 18.268755800000008], [-65.91630715999997, 18.268808560000025], [-65.91625932, 18.270663520000028], [-65.91625911, 18.270671989999983], [-65.91625876, 18.270887870000024], [-65.91625875, 18.27455298000001], [-65.91625871999997, 18.274613149999993], [-65.91625811, 18.279979179999998], [-65.91626000000002, 18.280340190000004], [-65.91625800000003, 18.281121770000027], [-65.91625804, 18.281356930000015], [-65.91618933000001, 18.281356570000014], [-65.91500064000002, 18.281350369999984], [-65.91296770999998, 
18.281339800000012], [-65.91253340999998, 18.281337529999973], [-65.91229578999997, 18.281336280000005], [-65.90998387000002, 18.281324219999988], [-65.90871597, 18.281318759999976], [-65.90216367, 18.28129032999999], [-65.90111256, 18.281285760000003], [-65.89913740999998, 18.28127711000002], [-65.89885119000002, 18.28127286], [-65.89237293000002, 18.281247450000023], [-65.89048616000002, 18.281239140000025], [-65.88711766, 18.28122424999998], [-65.88599235999999, 18.281219249999992], [-65.88291291000002, 18.28120555999999], [-65.88291178999998, 18.28584490999998], [-65.88291048999997, 18.291010749999998], [-65.88290905000002, 18.29165870999998], [-65.88291565999998, 18.302684020000015], [-65.88291612, 18.303763930000002], [-65.88291874999999, 18.31314200999998], [-65.88292098, 18.314737100000002], [-65.88292178, 18.316319510000028], [-65.88292336, 18.320099939999977], [-65.88292583999998, 18.325711160000026], [-65.88292658, 18.32707603], [-65.88292819999998, 18.330798640000012], [-65.88292837, 18.331260059999977], [-65.88087401000001, 18.331255440000007], [-65.87894735999998, 18.331251090000023], [-65.87603802000001, 18.33124448000001], [-65.87461601000001, 18.33124122999999], [-65.86804993999999, 18.331420340000022], [-65.86763531000003, 18.331420009999988], [-65.86672666999999, 18.33141931], [-65.86648867999997, 18.331419100000005], [-65.86635653000002, 18.331419170000004], [-65.86273363999999, 18.331421009999985], [-65.85793086000001, 18.331423389999998], [-65.85789242999999, 18.33142171999998], [-65.85542400000003, 18.331424019999986], [-65.85350249999999, 18.331425749999994], [-65.84982063000001, 18.33142908000002], [-65.84969439000002, 18.331429189999994], [-65.84969428, 18.331550279999988], [-65.84969804000002, 18.33796344000001], [-65.84969840999997, 18.338737999999978], [-65.8497021, 18.345083629999976], [-65.84970268000001, 18.346151969999994], [-65.84970370000002, 18.34806388999999], [-65.84281220000003, 18.348051429999998], [-65.83631126, 
18.348039400000005], [-65.83572038, 18.348038309999993], [-65.82972193, 18.348027020000018], [-65.82915395999999, 18.348025940000014], [-65.82799924, 18.34802375999999], [-65.82479099, 18.34801637999999], [-65.82399432, 18.34801453], [-65.82321229000001, 18.348012719999986], [-65.82141923, 18.348008540000023], [-65.82131368, 18.34800831000001], [-65.81955477000002, 18.348004189999983], [-65.81593006999998, 18.347995690000005], [-65.81524768000003, 18.347994099999994], [-65.81430688, 18.347991850000028], [-65.81409592, 18.34799134000002], [-65.81219464999998, 18.347986839999976], [-65.81037927, 18.347982520000016], [-65.80875237999999, 18.347978650000016], [-65.80848982999998, 18.34797801000002], [-65.80829098999999, 18.347977609999987], [-65.80772302000003, 18.347976930000016], [-65.80733909999998, 18.34797567999999], [-65.80353065000003, 18.347967859999983], [-65.80071562, 18.347962040000027], [-65.79902959999998, 18.34795853999998], [-65.79798546, 18.34795637000002], [-65.79009180999998, 18.347941110000022], [-65.78932427000001, 18.347939639999993], [-65.78840032, 18.347937820000027], [-65.78753816, 18.347936129999994], [-65.78601164000003, 18.347933119999993], [-65.78038322999998, 18.347921919999976], [-65.77934201, 18.347919479999973], [-65.77871169000002, 18.347918520000007], [-65.77776547000002, 18.347916520000012], [-65.77676473999998, 18.347914670000023], [-65.77662666999998, 18.347914370000012], [-65.77532722000001, 18.347911739999972], [-65.77499889, 18.347911039999985], [-65.77385053, 18.347908700000005], [-65.77354066999999, 18.34790806000001], [-65.76955748, 18.347899840000025], [-65.76888499, 18.347898439999994], [-65.76835487, 18.347897349999982], [-65.76683013000002, 18.34789416000001], [-65.76222604999998, 18.347884490000013], [-65.75909141, 18.347877840000024], [-65.75869390000003, 18.347874339999976], [-65.75078702000002, 18.34780397999998], [-65.74961532999998, 18.347793539999998], [-65.74804139999998, 18.347743690000016], [-65.74783091, 
18.347737010000003], [-65.74728348000002, 18.347736259999976], [-65.74297489999998, 18.347730169999977], [-65.74044021999998, 18.347710549999988], [-65.73974084000002, 18.347705140000016], [-65.73561567000002, 18.34767314999999], [-65.73484725999998, 18.347665380000024], [-65.73302854000002, 18.347646950000012], [-65.73294028999999, 18.347646069999996], [-65.73293561999998, 18.346632310000018], [-65.73292482, 18.344269059999988], [-65.73292071999998, 18.343373789999987], [-65.73291719000002, 18.34259155000001], [-65.73290365999998, 18.339655180000022], [-65.73291784000003, 18.337885169999993], [-65.73292518, 18.334980180000002], [-65.73292579000002, 18.334753429999978], [-65.73293016000002, 18.33284838999998], ] ], [ [ [-66.16262245000001, 18.051031109999997], [-66.16184043999999, 18.049737929999992], [-66.1619091, 18.04731941], [-66.16514587, 18.04502678], [-66.16511536000002, 18.044198989999984], [-66.16511725999999, 18.043462750000003], [-66.16511725999999, 18.043279649999988], [-66.16594887000002, 18.04355812], [-66.16832161000002, 18.041448590000016], [-66.16813087000003, 18.040346150000005], [-66.16640091, 18.04031180999999], [-66.16698073999999, 18.03862952999998], [-66.16720580999998, 18.037527080000018], [-66.16765975999999, 18.033853529999988], [-66.16861915999999, 18.034097669999994], [-66.16942024000002, 18.033731460000013], [-66.16954613000001, 18.03507804999998], [-66.16970443999998, 18.036489490000008], [-66.16989517000002, 18.037008290000017], [-66.17005347999998, 18.038480760000027], [-66.17072487000002, 18.03927802999999], [-66.17091750999998, 18.039522169999998], [-66.17117309999998, 18.039552689999994], [-66.17162131999999, 18.039552689999994], [-66.17216492, 18.039308549999987], [-66.17245293000002, 18.039155960000016], [-66.17293358, 18.039094920000025], [-66.17320251000001, 18.039094920000025], [-66.17344666000002, 18.039094920000025], [-66.17376709000001, 18.03928185000001], [-66.17305756000002, 18.042036059999987], [-66.17280005999999, 
18.04304695000002], [-66.17234993, 18.044912339999996], [-66.17170142999998, 18.050027849999992], [-66.17182922, 18.050394059999974], [-66.17035484000002, 18.051618580000024], [-66.16718483, 18.05198096999999], [-66.16692733999997, 18.051458360000026], [-66.16661072, 18.050817489999986], [-66.16660117999999, 18.050874710000016], [-66.16659355000002, 18.05092811999998], [-66.16641808000003, 18.052057269999978], [-66.16641426000001, 18.052072529999975], [-66.16576958000002, 18.05623436000002], [-66.16262245000001, 18.051031109999997], ] ], [ [ [-66.53508758999999, 18.392507550000005], [-66.53519820999998, 18.391786579999973], [-66.53970336999998, 18.392427440000006], [-66.53828812, 18.397306440000023], [-66.53822708000001, 18.39755821], [-66.53777313, 18.398542399999997], [-66.53761481999999, 18.400304790000007], [-66.53463554000001, 18.40027046], [-66.53440475000002, 18.399271010000007], [-66.53497124, 18.39718819000001], [-66.53505897999997, 18.396612170000026], [-66.53450774999999, 18.395158770000023], [-66.53466796999999, 18.394887919999974], [-66.53466796999999, 18.39454841999998], [-66.53477286999998, 18.394208909999975], [-66.53480911000003, 18.393922809999992], [-66.53482628, 18.39348030000002], [-66.5349865, 18.393175129999975], [-66.53508758999999, 18.392507550000005], ] ], ], }, } from shapely.geometry import shape print(shape(d["geometry"]))
55.789855
61
0.50972
1,546
23,097
7.613195
0.50194
0.004333
0.004843
0.005438
0.005947
0
0
0
0
0
0
0.796774
0.371953
23,097
413
62
55.924939
0.014615
0
0
0.02445
0
0
0.00814
0
0
0
0
0
0
1
0
false
0
0.00489
0
0.00489
0.002445
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
81f8ba9bf744d6a257f9ade3faa2ce783ae335eb
2,317
py
Python
vantage6/server/model/organization.py
jaspersnel/vantage6-server
88ad40d23cc36eaba57c170929f7ccdd0011720a
[ "Apache-2.0" ]
2
2020-10-19T08:59:08.000Z
2022-03-07T10:30:21.000Z
vantage6/server/model/organization.py
jaspersnel/vantage6-server
88ad40d23cc36eaba57c170929f7ccdd0011720a
[ "Apache-2.0" ]
67
2020-04-15T09:43:31.000Z
2022-03-18T08:29:17.000Z
vantage6/server/model/organization.py
jaspersnel/vantage6-server
88ad40d23cc36eaba57c170929f7ccdd0011720a
[ "Apache-2.0" ]
2
2021-01-21T15:09:26.000Z
2021-04-19T14:58:10.000Z
import base64 from sqlalchemy import Column, String, LargeBinary from sqlalchemy.orm import relationship from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm.exc import NoResultFound from vantage6.common.globals import STRING_ENCODING from .base import Base, Database class Organization(Base): """A legal entity. An organization plays a central role in managing distributed tasks. Each Organization contains a public key which other organizations can use to send encrypted messages that only this organization can read. """ # fields name = Column(String) domain = Column(String) address1 = Column(String) address2 = Column(String) zipcode = Column(String) country = Column(String) _public_key = Column(LargeBinary) # relations collaborations = relationship("Collaboration", secondary="Member", back_populates="organizations") results = relationship("Result", back_populates="organization") nodes = relationship("Node", back_populates="organization") users = relationship("User", back_populates="organization") created_tasks = relationship("Task", back_populates="initiator") roles = relationship("Role", back_populates="organization") @classmethod def get_by_name(cls, name): session = Database().Session try: return session.query(cls).filter_by(name=name).first() except NoResultFound: return None @hybrid_property def public_key(self): if self._public_key: # TODO this should be fixed properly try: return base64.b64decode(self._public_key)\ .decode(STRING_ENCODING) except Exception: return "" else: return "" @public_key.setter def public_key(self, public_key_b64): """Assumes that the public key is in b64-encoded.""" self._public_key = base64.b64decode( public_key_b64.encode(STRING_ENCODING) ) def __repr__(self): number_of_users = len(self.users) return ( "<Organization " f"name:{self.name}, " f"domain:{self.domain}, " f"users:{number_of_users}" ">" )
30.893333
76
0.643073
247
2,317
5.878543
0.445344
0.068182
0.068871
0.022039
0
0
0
0
0
0
0
0.011223
0.269314
2,317
74
77
31.310811
0.846426
0.139404
0
0.075472
0
0
0.096232
0.022403
0
0
0
0.013514
0
1
0.075472
false
0
0.132075
0
0.584906
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
81fea7350bf3a22df6647f4ff0e42232c0fd7743
191
py
Python
spinesTS/utils/_validation.py
BirchKwok/spinesTS
b88ec333f41f58979e0570177d1fdc364d976056
[ "Apache-2.0" ]
2
2021-08-15T09:29:37.000Z
2022-03-10T13:56:13.000Z
spinesTS/utils/_validation.py
BirchKwok/spinesTS
b88ec333f41f58979e0570177d1fdc364d976056
[ "Apache-2.0" ]
null
null
null
spinesTS/utils/_validation.py
BirchKwok/spinesTS
b88ec333f41f58979e0570177d1fdc364d976056
[ "Apache-2.0" ]
null
null
null
import numpy as np def check_x_y(x, y): assert isinstance(x, np.ndarray) and isinstance(y, np.ndarray) assert np.ndim(x) <= 3 and np.ndim(y) <= 2 assert len(x) == len(y)
23.875
67
0.612565
35
191
3.285714
0.457143
0.034783
0
0
0
0
0
0
0
0
0
0.013793
0.240838
191
7
68
27.285714
0.77931
0
0
0
0
0
0
0
0
0
0
0
0.6
1
0.2
false
0
0.2
0
0.4
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
2
c304d7f7756e66ca85c0f39399c15dd4c7181588
1,329
py
Python
collegiate-explorer-admin/cc_admin/cc_admin/test.py
Chit-Chaat/Collegiate_Explorer_APP
f30171d01fec62a836332b5508374144fbb487c7
[ "MIT" ]
3
2021-05-24T23:06:40.000Z
2021-11-08T10:32:42.000Z
collegiate-explorer-admin/cc_admin/cc_admin/test.py
Chit-Chaat/Collegiate_Explorer_APP
f30171d01fec62a836332b5508374144fbb487c7
[ "MIT" ]
4
2020-10-12T03:00:43.000Z
2020-11-17T01:47:56.000Z
collegiate-explorer-admin/cc_admin/cc_admin/test.py
Chit-Chaat/Collegiate_Explorer_APP
f30171d01fec62a836332b5508374144fbb487c7
[ "MIT" ]
2
2021-03-01T15:30:26.000Z
2022-01-13T21:30:20.000Z
__author__ = 'Aaron Yang' __email__ = 'byang971@usc.edu' __date__ = '10/28/2020 4:52 PM' # import re # # # def format_qs_score(score_str): # """ # help you generate a qs score # 1 - 100 : 5 # 141-200 : 4 # =100: 4 # N/A 3 # :param score_str: # :return: # """ # score = 3 # if not score_str or score_str != "N/A": # try: # parts = int(list(filter(lambda val: val, # list(re.split('-|=', score_str))))[0]) # except: # return 3 # score = 5 - int(parts / 100) # if score > 5 or score < 1: # return 3 # return score # # # print(format_qs_score("=100")) # # print(list(filter(lambda val: val, re.split('-|=', "=100")))) # import csv # import numpy as np # import requests # # with open('./college_explorer.csv', newline='', encoding='utf-8') as file: # data = list(csv.reader(file)) # data = np.array(data) # img_list = data[1:, 33].tolist() # # img_list = list(filter(lambda url: url != 'N/A', img_list)) # # # for url in img_list: # response = requests.get(url) # if response.status_code == 200: # school_name = url.split('/')[-1].split('_')[0] # with open("./images/" + school_name + ".jpg", 'wb') as f: # f.write(response.content)
26.058824
76
0.527464
179
1,329
3.748603
0.463687
0.059613
0.071535
0.056632
0.065574
0
0
0
0
0
0
0.059511
0.291949
1,329
51
77
26.058824
0.65356
0.86155
0
0
1
0
0.321168
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c332b852b9ea902789bff01e3510374ac9b4407d
106
py
Python
variables.py
MuhweziDeo/python_refresher
0d100f88524ff780f1cee8afabfee1025c648f8b
[ "MIT" ]
null
null
null
variables.py
MuhweziDeo/python_refresher
0d100f88524ff780f1cee8afabfee1025c648f8b
[ "MIT" ]
null
null
null
variables.py
MuhweziDeo/python_refresher
0d100f88524ff780f1cee8afabfee1025c648f8b
[ "MIT" ]
null
null
null
x = 2 print(x) # multiple assignment a, b, c, d = (1, 2, 5, 9) print(a, b, c, d) print(type(str(a)))
8.833333
25
0.528302
23
106
2.434783
0.608696
0.071429
0.107143
0.142857
0
0
0
0
0
0
0
0.0625
0.245283
106
11
26
9.636364
0.6375
0.179245
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.6
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
c337200f464a7d012b7b952e50ed5709111473ef
7,996
py
Python
cradlepy/framework/http.py
cblanquera/cradlepy
1634fe38a0cc58f92dbfc2b0c84ace9d16821c3c
[ "MIT" ]
null
null
null
cradlepy/framework/http.py
cblanquera/cradlepy
1634fe38a0cc58f92dbfc2b0c84ace9d16821c3c
[ "MIT" ]
null
null
null
cradlepy/framework/http.py
cblanquera/cradlepy
1634fe38a0cc58f92dbfc2b0c84ace9d16821c3c
[ "MIT" ]
null
null
null
from .request import Request
from .response import Response


class HttpRequestCookieTrait:
    'Designed for the Request Object; Adds methods to store COOKIE data'

    def get_cookies(self, *args):
        'Returns COOKIE given name or all COOKIE'
        return self.get('cookie', *args)

    def remove_cookies(self, *args):
        'Removes COOKIE given name or all COOKIE'
        return self.remove('cookie', *args)

    def has_cookies(self, *args):
        'Returns true if has COOKIE given name or if COOKIE is set'
        return self.exists('cookie', *args)

    def set_cookies(self, data, *args):
        'Sets COOKIE'
        # Bulk assignment when a whole collection is handed in.
        if isinstance(data, (list, dict, tuple)):
            return self.set('cookie', data)
        # A bare value with no path is a no-op; keep the fluent interface.
        if len(args) == 0:
            return self
        return self.set('cookie', data, *args)


class HttpRequestGetTrait:
    'Designed for the Request Object; Adds methods to store GET data'

    def get_get(self, *args):
        'Returns GET given name or all GET'
        return self.get('get', *args)

    def remove_get(self, *args):
        'Removes GET given name or all GET'
        return self.remove('get', *args)

    def has_get(self, *args):
        'Returns true if has GET given name or if GET is set'
        return self.exists('get', *args)

    def set_get(self, data, *args):
        'Sets GET'
        if isinstance(data, (list, dict, tuple)):
            return self.set('get', data)
        if len(args) == 0:
            return self
        return self.set('get', data, *args)


class HttpRequestPostTrait:
    'Designed for the Request Object; Adds methods to store POST data'

    def get_post(self, *args):
        'Returns POST given name or all POST'
        return self.get('post', *args)

    def remove_post(self, *args):
        'Removes POST given name or all POST'
        return self.remove('post', *args)

    def has_post(self, *args):
        'Returns true if has POST given name or if POST is set'
        return self.exists('post', *args)

    def set_post(self, data, *args):
        'Sets POST'
        if isinstance(data, (list, dict, tuple)):
            return self.set('post', data)
        if len(args) == 0:
            return self
        return self.set('post', data, *args)


class HttpRequestServerTrait:
    'Designed for the Request Object; Adds methods to store SERVER data'

    # All methods below are unimplemented stubs (interface placeholders).

    def get_method(self):
        'Returns method if set'
        pass

    def get_path(self, name=None):
        'Returns path data given name or all path data'
        pass

    def get_query(self):
        'Returns string query if set'
        pass

    def get_server(self, name=None):
        'Returns SERVER data given name or all SERVER data'
        pass

    def has_server(self, name=None):
        'Returns SERVER data given name or all SERVER data'
        pass

    def is_method(self, method):
        'Returns true if method is the one given'
        pass

    def set_method(self, method):
        'Sets request method'
        pass

    def set_path(self, path):
        'Sets path given in string or array form'
        pass

    def set_query(self, query):
        'Sets query string'
        pass

    def set_server(self, server):
        'Sets SERVER'
        pass


class HttpRequestSessionTrait:
    'Designed for the Request Object; Adds methods to store SESSION data'

    def get_session(self, *args):
        'Returns SESSION given name or all SESSION'
        return self.get('session', *args)

    def remove_session(self, *args):
        'Removes SESSION given name or all SESSION'
        self.remove('session', *args)
        #TODO: link session object
        return self

    def has_session(self, *args):
        'Returns true if has SESSION given name or if SESSION is set'
        return self.exists('session', *args)

    def set_session(self, data, *args):
        'Sets SESSION'
        if isinstance(data, (list, dict, tuple)):
            return self.set('session', data)
        if len(args) == 0:
            return self
        self.set('session', data, *args)
        #TODO: link session object
        return self


class HttpRequest(
    Request,
    HttpRequestCookieTrait,
    HttpRequestGetTrait,
    HttpRequestPostTrait,
    HttpRequestServerTrait,
    HttpRequestSessionTrait
):
    'Http Request Object'

    def load(self):
        'Loads default data given by WSGI'
        pass


class HttpResponseHeaderTrait:
    'Designed for the Response Object; Adds methods to process headers'

    def add_header(self, name, value=None):
        'Adds a header parameter'
        pass

    def get_headers(self, name=None):
        'Returns either the header value given the name or the all headers'
        pass

    def remove_header(self, name):
        'Removes a header parameter'
        pass


class HttpResponsePageTrait:
    'Designed for the Response Object; Adds methods to process REST type responses'

    def add_meta(self, name, content):
        'Adds a page meta item'
        pass

    def get_flash(self):
        'Returns flash data'
        pass

    def get_meta(self, *args):
        'Returns meta given path or all meta data'
        pass

    def get_page(self, *args):
        'Returns page data given path or all page data'
        pass

    def has_page(self, *args):
        'Returns true if theres any page data'
        pass

    def remove_page(self, *args):
        'Removes arbitrary page data'
        pass

    def set_flash(self, message, type='info'):
        'Sets a Page flash'
        pass

    def set_page(self, *args):
        'Sets arbitrary page data'
        pass

    def set_title(self, title):
        'Sets a Page title'
        pass


class HttpResponseStatusTrait:
    'Designed for the Response Object; Adds methods to process status codes'

    def get_status(self):
        'Returns the status code'
        pass

    def set_status(self, code, status):
        'Sets a status code'
        pass


class HttpResponse(
    Response,
    HttpResponseHeaderTrait,
    HttpResponsePageTrait,
    HttpResponseStatusTrait
):
    'Http Response Object'

    def load(self):
        'Loads default data'
        pass


class HttpRouterTrait:
    'Designed for the HttpHandler we are parting this out to lessen the confusion'

    def all(self, path, callback):
        'Adds routing middleware for all methods'
        pass

    def delete(self, path, callback):
        'Adds routing middleware for delete method'
        pass

    def get(self, path, callback):
        'Adds routing middleware for get method'
        pass

    def get_router(self):
        'Returns a router object'
        pass

    def post(self, path, callback):
        'Adds routing middleware for post method'
        pass

    def put(self, path, callback):
        'Adds routing middleware for put method'
        pass

    def route(self, method, path, callback):
        'Adds routing middleware'
        pass

    def set_router(self, router):
        'Sets the router to use'
        pass

    def trigger_route(self, method, path, *args):
        'Manually trigger a route'
        pass


class HttpRouterInterface:
    'Handles method-path matching and routing'

    def __init__(self, handler=None):
        'Allow to pass a custom EventHandler'
        pass

    def process(self, request, *args):
        'Process routes'
        pass

    def route(self, method, pattern, callback):
        'Adds routing middleware'
        pass


class HttpRouter(HttpRouterInterface):
    'Handles method-path matching and routing'

    def __init__(self, handler=None):
        'Allow to pass a custom EventHandler'
        pass

    def process(self, request, *args):
        'Process routes'
        pass

    def route(self, method, pattern, callback):
        'Adds routing middleware'
        pass


class HttpDispatcher:
    pass


class HttpHandler:
    pass


class HttpDispatcherTrait:
    pass


class HttpTrait:
    pass
23.380117
83
0.617184
999
7,996
4.888889
0.131131
0.045864
0.033784
0.031532
0.454955
0.403767
0.361589
0.287879
0.247748
0.137797
0
0.000715
0.30065
7,996
341
84
23.44868
0.872675
0.325913
0
0.337553
0
0
0.326032
0
0
0
0
0.002933
0
1
0.240506
false
0.198312
0.008439
0
0.421941
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
c3386296bb9f34b0112bf5ce7c89306471d38bbf
4,311
py
Python
conjuntos.py
Tiesco789/guppe
464702a2d618e149439a9b5c763f82c5376d2c32
[ "MIT" ]
null
null
null
conjuntos.py
Tiesco789/guppe
464702a2d618e149439a9b5c763f82c5376d2c32
[ "MIT" ]
null
null
null
conjuntos.py
Tiesco789/guppe
464702a2d618e149439a9b5c763f82c5376d2c32
[ "MIT" ]
null
null
null
""" Conjuntos — Conjunto em qualquer linguagem de programação, estamos fazendo referência à teoria de conjuntos da matemática — Aqui no Python, os conjuntos são chamados de sets Dito isto, da mesma forma que na matemática: — Sets (conjuntos) não possuem valores duplicados; — Sets (conjuntos) não possuem valores ordenados; — Elementos não são acessados via índice, ou seja, conjuntos não são indexados; Conjuntos são bons para se utilizar quando precisamos armazenar elementos mas não nos importamos com a ordenaçào deles. Quando não precisamos se preocupar com chaves, valores e itens duplicados Os conjuntos (sets) são referenciados em python com chaves {} Diferença entre conjutnos (sets) e mapas (dicionários) em python: — Um dicionário tem chave/valor — Um conjunto tem apenas valor # Definindo um conjunto # Forma 1 s = set({1, 2, 3, 4, 5, 6, 7, 2, 3}) # Repare que temos valores repetidos print(s) print(type(s)) # OBS: Ao criar uim conjunto, caso seja adicionado um valor já existente, o mesmo será ignorado sem gerar error e nào fará parde do conjunto # Forma 2 s = {1, 2, 3, 4, 5, 5} print(s) print(type(s)) # Podemos verificar se um determinado valor está contido em um conjunto if 3 in s: print('Encontrei o valor 3') else: print('Não encontrei o valor 3') # Importante lembrar que, alem de não termos valores duplicados, os valores não são ordenados dados = 99, 2, 34, 23, 2, 12, 1, 44, 5, 34 # Listas aceitam valores duplicados, então temos 10 elementos lista = list(dados) print(f"Lista: {lista} com {len(lista)} elementos") # Tuplas aceitam valores duplicados, então temos 10 elementos tupla = tuple(dados) print(f"Tupla: {tupla} com {len(tupla)} elementos") # Dicionários não aceitam chaves duplicadas, então temos 8 elementos dicionario = {}.fromkeys(dados, 'dict') print(f"Dicionário: {dicionario} com {len(dicionario)} elementos") # Conjuntos não aceitam valores duplicados, então temos 8 elementos conjunto = set(dados) print(f"Conjunto: {conjunto} com {len(conjunto)} 
elementos") # Assim como os outros conjuntos python, podemos colocar tipos de dados misturados em Sets s = {1, 'b', True, 1.23, 44} print(s) print(type(s)) # Podemos iterar em um set normalmente for valor in s: print(valor) # Usos interessantes com sets # Imagine que fizemos um formulário de cadastro de visitantes em uma feira ou museu, # os visitantes informam manualmente a cidade de onde vieram # Nós adicionamos cada cidade em uma lista Python, já que em uma lista podemos adicionar novos elmentos e ter repetições cidades = ['Belo Horizante', 'São Paulo', 'Campo Grande', 'Cuiaba', 'Campo Grande', 'São Paulo', 'Cuiaba'] print(cidades) print(len(cidades)) # Agora precisamos saber quantas cidades distintas, ou seja, únicas, temos. # O que você faria? Faria um loop na lista? # Podemos utilizar o set para isso print(len(set(cidades))) s = {1, 2, 3} s.add(4) print(s) s = {1, 2, 3} s.remove(3) print(s) s.discard(2) print(s) # Copiando um conjunto para outro # Forma 1 - Deep Copy novo = s.copy() print(novo) novo.add(4) print(novo) print(s) # Forma 2 - Shallow Copy novo = s novo.add(4) print(novo) print(s) s = {1, 2, 3} print(s) s.clear() print(s) # Precisamos gerar qum conjunto com nomes de estudantes únicos # Forma 1 - Utilizando union # unicos1 = estudantes_python.union(estudantes_java) # print(unicos1) # Forma 2 - Utilizando o | pipe unicos2 = estudantes_python | estudantes_java print(unicos2) # Gerar um conjunto de estudantes que estão em ambos os cursos # Forma 1 - Utilizando union ambos1 = estudantes_python.intersection(estudantes_java) print(ambos1) # Forma 2 - utilizando o & ambos2 = estudantes_python & estudantes_java print(ambos2) # Métodos matemáticos de conjuntos # Imagine que temos dois conjuntos: um contendo estudantes do curso Python e um # Contendo estudantes do curso Java estudantes_python = {'Pedro', 'Maria', 'Cláudia', 'João', 'Marcos', 'Patricia'} estudantes_java = {'Ana', 'Maria', 'Cláudia', 'João', 'Marcos', 'Patricia'} # Veja que alguns alins que 
estudam python também estudam java. # Gerar um conjunto de estudantes que não estão no outro curso so_python = estudantes_python.difference(estudantes_java) print(so_python) so_java = estudantes_java.difference(estudantes_python) print(so_java) """
27.812903
140
0.740895
662
4,311
4.808157
0.353474
0.01885
0.004713
0.005027
0.178448
0.099906
0.042727
0
0
0
0
0.02114
0.166087
4,311
154
141
27.993506
0.862309
0.997448
0
null
0
null
0
0
null
0
0
0.006494
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
c34deabfbf09d812a3e974c9b52d0665996b8dda
1,095
py
Python
apps/cars/tests/api/abstract/abstract_base_api_test.py
agorsk1/car-rating-app
354c5933f4cbad69c9a57d1839f9086cd5cf9a1d
[ "MIT" ]
1
2022-03-03T11:15:25.000Z
2022-03-03T11:15:25.000Z
apps/cars/tests/api/abstract/abstract_base_api_test.py
agorsk1/car-rating-app
354c5933f4cbad69c9a57d1839f9086cd5cf9a1d
[ "MIT" ]
null
null
null
apps/cars/tests/api/abstract/abstract_base_api_test.py
agorsk1/car-rating-app
354c5933f4cbad69c9a57d1839f9086cd5cf9a1d
[ "MIT" ]
null
null
null
from abc import ABC, abstractmethod from django.test import TestCase from rest_framework.generics import GenericAPIView from rest_framework.test import APIRequestFactory from apps.cars.factory import UserFactory class AbstractBaseTest(object): class AbstractBaseApiTestCase(TestCase, ABC): """ Abstract Base TestCase class. """ def setUp(self) -> None: """Base setup""" self.user = UserFactory.create() self.request_factory = APIRequestFactory() self.view = self._view() self.endpoint = self._endpoint() @abstractmethod def _view(self) -> GenericAPIView.as_view(): """Abstract method that returns YourApiToTest.as_view()""" pass @abstractmethod def _endpoint(self) -> str: """Abstract method that return endpoint string E.g /cars/""" pass @abstractmethod def test_anonymous_request(self, *args, **kwargs) -> None: """test if anonymous user cannot access endpoint""" pass
29.594595
72
0.621918
108
1,095
6.203704
0.444444
0.035821
0.050746
0
0
0
0
0
0
0
0
0
0.286758
1,095
36
73
30.416667
0.857875
0.177169
0
0.285714
0
0
0
0
0
0
0
0
0
1
0.190476
false
0.142857
0.238095
0
0.52381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
c3556300b12020a7a08798e9741d8eecbab08f07
1,555
py
Python
circuitpython/schedule.py
Flameeyes/birch-books-smarthome
245a8afc848b2a8cf4dbcde31b36716b44937200
[ "MIT" ]
null
null
null
circuitpython/schedule.py
Flameeyes/birch-books-smarthome
245a8afc848b2a8cf4dbcde31b36716b44937200
[ "MIT" ]
null
null
null
circuitpython/schedule.py
Flameeyes/birch-books-smarthome
245a8afc848b2a8cf4dbcde31b36716b44937200
[ "MIT" ]
null
null
null
# SPDX-FileCopyrightText: © 2020 The birch-books-smarthome Authors # SPDX-License-Identifier: MIT BOOKSTORE_GROUND_FLOOR = 0x0007 BOOKSTORE_FIRST_FLOOR = 0x0008 BOOKSTORE_TERRARIUM = 0x0010 BOOKSTORE_BEDROOM = 0x0020 HOUSE_BASEMENT = 0x0040 HOUSE_GROUND_FLOOR = 0x0380 HOUSE_BEDROOM_LIGHT = 0x0400 HOUSE_BEDROOM_LAMP = 0x0800 HOUSE_FIREPLACE_1 = 0x1000 HOUSE_FIREPLACE_2 = 0x2000 SCHEDULE = [ BOOKSTORE_BEDROOM | HOUSE_BEDROOM_LIGHT, BOOKSTORE_TERRARIUM | BOOKSTORE_BEDROOM | HOUSE_BEDROOM_LIGHT, BOOKSTORE_TERRARIUM | BOOKSTORE_FIRST_FLOOR | HOUSE_BEDROOM_LIGHT, BOOKSTORE_TERRARIUM | BOOKSTORE_GROUND_FLOOR | HOUSE_GROUND_FLOOR, BOOKSTORE_TERRARIUM | BOOKSTORE_GROUND_FLOOR | HOUSE_GROUND_FLOOR, BOOKSTORE_TERRARIUM | BOOKSTORE_GROUND_FLOOR, BOOKSTORE_TERRARIUM | BOOKSTORE_GROUND_FLOOR, BOOKSTORE_TERRARIUM | BOOKSTORE_GROUND_FLOOR, BOOKSTORE_TERRARIUM | BOOKSTORE_GROUND_FLOOR, BOOKSTORE_TERRARIUM | BOOKSTORE_GROUND_FLOOR | HOUSE_GROUND_FLOOR, BOOKSTORE_TERRARIUM | BOOKSTORE_FIRST_FLOOR | HOUSE_GROUND_FLOOR, BOOKSTORE_TERRARIUM | BOOKSTORE_FIRST_FLOOR | HOUSE_BASEMENT | HOUSE_BEDROOM_LIGHT, BOOKSTORE_TERRARIUM | BOOKSTORE_BEDROOM | HOUSE_BASEMENT | HOUSE_BEDROOM_LAMP, BOOKSTORE_BEDROOM | HOUSE_BEDROOM_LAMP, 0, 0, ] TEST_SCHEDULE = [ BOOKSTORE_GROUND_FLOOR, BOOKSTORE_FIRST_FLOOR, BOOKSTORE_TERRARIUM, BOOKSTORE_BEDROOM, HOUSE_BASEMENT, HOUSE_GROUND_FLOOR, HOUSE_BEDROOM_LIGHT, HOUSE_BEDROOM_LAMP, HOUSE_FIREPLACE_1, HOUSE_FIREPLACE_2, ]
33.804348
87
0.803859
178
1,555
6.505618
0.213483
0.142487
0.303109
0.248705
0.576857
0.576857
0.553541
0.497409
0.353195
0.353195
0
0.045249
0.147267
1,555
45
88
34.555556
0.8273
0.059807
0
0.225
0
0
0
0
0
0
0.041124
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c3565453ab31565d1b32ad8f383deec201854e66
1,563
py
Python
services/smtp.py
sourceperl/docker.mqttwarn
9d87337f766843c8bdee34eba8d29776e7032009
[ "MIT" ]
null
null
null
services/smtp.py
sourceperl/docker.mqttwarn
9d87337f766843c8bdee34eba8d29776e7032009
[ "MIT" ]
null
null
null
services/smtp.py
sourceperl/docker.mqttwarn
9d87337f766843c8bdee34eba8d29776e7032009
[ "MIT" ]
2
2016-09-03T09:12:17.000Z
2020-03-03T11:58:40.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- __author__ = 'Jan-Piet Mens <jpmens()gmail.com>' __copyright__ = 'Copyright 2014 Jan-Piet Mens' __license__ = """Eclipse Public License - v 1.0 (http://www.eclipse.org/legal/epl-v10.html)""" import smtplib from email.mime.text import MIMEText def plugin(srv, item): srv.logging.debug("*** MODULE=%s: service=%s, target=%s", __file__, item.service, item.target) smtp_addresses = item.addrs server = item.config['server'] sender = item.config['sender'] starttls = item.config['starttls'] username = item.config['username'] password = item.config['password'] msg = MIMEText(item.message) msg['Subject'] = item.get('title', "%s notification" % (srv.SCRIPTNAME)) msg['To'] = ", ".join(smtp_addresses) msg['From'] = sender msg['X-Mailer'] = srv.SCRIPTNAME try: srv.logging.debug("Sending SMTP notification to %s [%s]..." % (item.target, smtp_addresses)) server = smtplib.SMTP(server) server.set_debuglevel(0) server.ehlo() if starttls: server.starttls() if username: server.login(username, password) server.sendmail(sender, smtp_addresses, msg.as_string()) server.quit() srv.logging.debug("Successfully sent SMTP notification") except Exception, e: srv.logging.warn("Error sending notification to SMTP recipient %s [%s]: %s" % (item.target, smtp_addresses, str(e))) return False return True
33.978261
124
0.621241
187
1,563
5.069519
0.481283
0.068565
0.047468
0.072785
0.052743
0.052743
0
0
0
0
0
0.008333
0.232246
1,563
45
125
34.733333
0.781667
0.026871
0
0
0
0.029412
0.250165
0
0
0
0
0
0
0
null
null
0.058824
0.058824
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
c35c0c54bc5945e22f05841e1485001ae7177f54
2,984
py
Python
scripts/convert_to_singlehead.py
Lollipop321/mini-decoder-network
cfdaba579b45cba1d181585e5430178c1dc60049
[ "BSD-3-Clause" ]
1
2021-09-18T05:07:38.000Z
2021-09-18T05:07:38.000Z
scripts/convert_to_singlehead.py
Lollipop321/mini-decoder-network
cfdaba579b45cba1d181585e5430178c1dc60049
[ "BSD-3-Clause" ]
null
null
null
scripts/convert_to_singlehead.py
Lollipop321/mini-decoder-network
cfdaba579b45cba1d181585e5430178c1dc60049
[ "BSD-3-Clause" ]
1
2021-12-24T13:05:02.000Z
2021-12-24T13:05:02.000Z
import torch import math import time import struct import argparse import numpy as np from collections import OrderedDict if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('-model', required=True, help="trained model prefix, also include dir, e.g. ../data/model-100") args = parser.parse_args() model_path = args.model checkpoint = torch.load(model_path, map_location='cpu') assert 'args' in checkpoint assert 'model' in checkpoint args = checkpoint['args'] model = checkpoint['model'] checkpoint_new = {} model_new = {} e = [0, 0, 0, 0, 0, 0] d = [0, 0, 0, 0, 0, 0] for name, w in model.items(): if "decoder" in name: if "self_attn.in_proj" in name: layer = eval(name.split(".")[2]) wq, wk, wv = w.chunk(3, dim=0) assert args.encoder_embed_dim == args.decoder_embed_dim model_new[name] = torch.cat([wq[(args.encoder_embed_dim // 8 * e[layer]): (args.encoder_embed_dim // 8 * (e[layer] + 1))], wk[(args.encoder_embed_dim // 8 * e[layer]): (args.encoder_embed_dim // 8 * (e[layer] + 1))], wv[(args.encoder_embed_dim // 8 * e[layer]): (args.encoder_embed_dim // 8 * (e[layer] + 1))]], dim=0) elif "encoder_attn.in_proj" in name: layer = eval(name.split(".")[2]) wq, wk, wv = w.chunk(3, dim=0) assert args.encoder_embed_dim == args.decoder_embed_dim model_new[name] = torch.cat([wq[(args.encoder_embed_dim // 8 * d[layer]): (args.encoder_embed_dim // 8 * (d[layer] + 1))], wk[(args.encoder_embed_dim // 8 * d[layer]): (args.encoder_embed_dim // 8 * (d[layer] + 1))], wv[(args.encoder_embed_dim // 8 * d[layer]): (args.encoder_embed_dim // 8 * (d[layer] + 1))]], dim=0) elif "self_attn.out_proj.weight" in name: layer = eval(name.split(".")[2]) assert args.encoder_embed_dim == args.decoder_embed_dim model_new[name] = w[:, (args.encoder_embed_dim // 8 * e[layer]): (args.encoder_embed_dim // 8 * (e[layer] + 1))] elif "encoder_attn.out_proj.weight" in name: layer = eval(name.split(".")[2]) assert args.encoder_embed_dim == args.decoder_embed_dim model_new[name] = w[:, 
(args.encoder_embed_dim // 8 * d[layer]): (args.encoder_embed_dim // 8 * (d[layer] + 1))] else: model_new[name] = w else: model_new[name] = w checkpoint_new['args'] = args checkpoint_new['args'].arch = "transformer_singlehead_t2t_wmt_en_de" checkpoint_new['model'] = model_new # print(checkpoint_new['args'].arch) torch.save(checkpoint_new, 'checkpoint_singlehead.pt') print("finished!")
45.907692
146
0.560657
393
2,984
4.030534
0.208651
0.121212
0.20202
0.239899
0.585859
0.554293
0.546717
0.546717
0.541667
0.541667
0
0.023685
0.29256
2,984
65
147
45.907692
0.72667
0.011394
0
0.259259
0
0
0.094947
0.038318
0
0
0
0
0.111111
1
0
false
0
0.12963
0
0.12963
0.018519
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c360b0127afead19c24d728369419544803b4819
2,191
py
Python
Modulo_5/proyecto/presentacion/form_ubicacion/formAUbicacion_designer.py
AutodidactaMx/cocid_python
11628f465ff362807a692c79ede26bf30dd8e26a
[ "MIT" ]
null
null
null
Modulo_5/proyecto/presentacion/form_ubicacion/formAUbicacion_designer.py
AutodidactaMx/cocid_python
11628f465ff362807a692c79ede26bf30dd8e26a
[ "MIT" ]
null
null
null
Modulo_5/proyecto/presentacion/form_ubicacion/formAUbicacion_designer.py
AutodidactaMx/cocid_python
11628f465ff362807a692c79ede26bf30dd8e26a
[ "MIT" ]
1
2022-03-04T00:57:18.000Z
2022-03-04T00:57:18.000Z
import tkinter as tk import matplotlib.pyplot as plt from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg from pandastable import Table import util.generic as utl class FormUbicacionDesigner(tk.Toplevel): def __init__(self): super().__init__() def initialize_component(self): self.config_window() self.framePrincipal() self.framePrincipalPanel1() self.framePrincipalPanel2() self.tablaEstadisticosUbicacion() self.graficaUbicacion() def config_window(self): self.title('Analisis de variable de ubicación') w, h = 1400,500 self.geometry("%dx%d+0+0" % (w, h)) self.config(bg='black') utl.centrar_ventana(self,w,h) def framePrincipal(self): self.frame_zona_principal = tk.Frame(self, bd=0, relief=tk.SOLID, bg='white', width=100, height=100) self.frame_zona_principal.pack(side="top",fill=tk.BOTH ) def framePrincipalPanel1(self): self.frame_zona_principal_panel1 = tk.Frame(self.frame_zona_principal, bd=1, relief=tk.SOLID, bg='white', width=100, height=100) self.frame_zona_principal_panel1.pack(side="left",fill=tk.BOTH, expand="yes") def framePrincipalPanel2(self): self.frame_zona_principal_panel2 = tk.Frame(self.frame_zona_principal, bd=1, relief=tk.SOLID, bg='white', width=100, height=100) self.frame_zona_principal_panel2.pack(side="left",fill=tk.BOTH, expand="yes") def tablaEstadisticosUbicacion(self): self.tablaDatosUbicacion = Table(self.frame_zona_principal_panel1, showtoolbar=False, showstatusbar=False, rows=8,width=500) self.tablaDatosUbicacion.show() def graficaUbicacion(self): self.figure_ubicacion = plt.Figure(figsize=(50,10)) self.canvas_figure_ubicacion = FigureCanvasTkAgg(self.figure_ubicacion, self.frame_zona_principal_panel2) self.canvas_figure_ubicacion.get_tk_widget().pack(side=tk.LEFT, fill=tk.X, pady=20)
41.339623
137
0.645824
253
2,191
5.407115
0.351779
0.065789
0.095029
0.160819
0.341374
0.225877
0.225877
0.225877
0.225877
0.17617
0
0.030432
0.250114
2,191
52
138
42.134615
0.802191
0
0
0
0
0
0.036057
0
0
0
0
0
0
1
0.216216
false
0
0.135135
0
0.378378
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
c36ce52f1b69aad8e3b2676523c1755292c1c03c
29,068
py
Python
src/flower/proto/transport_pb2.py
xinchiqiu/flower
ef12441fdebaa32f34e12dd02dfa376fa2988eaf
[ "Apache-2.0" ]
null
null
null
src/flower/proto/transport_pb2.py
xinchiqiu/flower
ef12441fdebaa32f34e12dd02dfa376fa2988eaf
[ "Apache-2.0" ]
null
null
null
src/flower/proto/transport_pb2.py
xinchiqiu/flower
ef12441fdebaa32f34e12dd02dfa376fa2988eaf
[ "Apache-2.0" ]
1
2020-06-01T11:06:18.000Z
2020-06-01T11:06:18.000Z
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: flower/proto/transport.proto from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='flower/proto/transport.proto', package='flower.transport', syntax='proto3', serialized_options=None, serialized_pb=b'\n\x1c\x66lower/proto/transport.proto\x12\x10\x66lower.transport\"2\n\nParameters\x12\x0f\n\x07tensors\x18\x01 \x03(\x0c\x12\x13\n\x0btensor_type\x18\x02 \x01(\t\"\xb8\x05\n\rServerMessage\x12>\n\treconnect\x18\x01 \x01(\x0b\x32).flower.transport.ServerMessage.ReconnectH\x00\x12G\n\x0eget_parameters\x18\x02 \x01(\x0b\x32-.flower.transport.ServerMessage.GetParametersH\x00\x12\x39\n\x07\x66it_ins\x18\x03 \x01(\x0b\x32&.flower.transport.ServerMessage.FitInsH\x00\x12\x43\n\x0c\x65valuate_ins\x18\x04 \x01(\x0b\x32+.flower.transport.ServerMessage.EvaluateInsH\x00\x1a\x1c\n\tReconnect\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x1a\x0f\n\rGetParameters\x1a\xad\x01\n\x06\x46itIns\x12\x30\n\nparameters\x18\x01 \x01(\x0b\x32\x1c.flower.transport.Parameters\x12\x42\n\x06\x63onfig\x18\x02 \x03(\x0b\x32\x32.flower.transport.ServerMessage.FitIns.ConfigEntry\x1a-\n\x0b\x43onfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\xb7\x01\n\x0b\x45valuateIns\x12\x30\n\nparameters\x18\x01 \x01(\x0b\x32\x1c.flower.transport.Parameters\x12G\n\x06\x63onfig\x18\x02 \x03(\x0b\x32\x37.flower.transport.ServerMessage.EvaluateIns.ConfigEntry\x1a-\n\x0b\x43onfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x05\n\x03msg\"\xbc\x04\n\rClientMessage\x12@\n\ndisconnect\x18\x01 
\x01(\x0b\x32*.flower.transport.ClientMessage.DisconnectH\x00\x12G\n\x0eparameters_res\x18\x02 \x01(\x0b\x32-.flower.transport.ClientMessage.ParametersResH\x00\x12\x39\n\x07\x66it_res\x18\x03 \x01(\x0b\x32&.flower.transport.ClientMessage.FitResH\x00\x12\x43\n\x0c\x65valuate_res\x18\x04 \x01(\x0b\x32+.flower.transport.ClientMessage.EvaluateResH\x00\x1a\x36\n\nDisconnect\x12(\n\x06reason\x18\x01 \x01(\x0e\x32\x18.flower.transport.Reason\x1a\x41\n\rParametersRes\x12\x30\n\nparameters\x18\x01 \x01(\x0b\x32\x1c.flower.transport.Parameters\x1ak\n\x06\x46itRes\x12\x30\n\nparameters\x18\x01 \x01(\x0b\x32\x1c.flower.transport.Parameters\x12\x14\n\x0cnum_examples\x18\x02 \x01(\x03\x12\x19\n\x11num_examples_ceil\x18\x03 \x01(\x03\x1a\x31\n\x0b\x45valuateRes\x12\x14\n\x0cnum_examples\x18\x01 \x01(\x03\x12\x0c\n\x04loss\x18\x02 \x01(\x02\x42\x05\n\x03msg*R\n\x06Reason\x12\x0b\n\x07UNKNOWN\x10\x00\x12\r\n\tRECONNECT\x10\x01\x12\x16\n\x12POWER_DISCONNECTED\x10\x02\x12\x14\n\x10WIFI_UNAVAILABLE\x10\x03\x32_\n\rFlowerService\x12N\n\x04Join\x12\x1f.flower.transport.ClientMessage\x1a\x1f.flower.transport.ServerMessage\"\x00(\x01\x30\x01\x62\x06proto3' ) _REASON = _descriptor.EnumDescriptor( name='Reason', full_name='flower.transport.Reason', filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name='UNKNOWN', index=0, number=0, serialized_options=None, type=None), _descriptor.EnumValueDescriptor( name='RECONNECT', index=1, number=1, serialized_options=None, type=None), _descriptor.EnumValueDescriptor( name='POWER_DISCONNECTED', index=2, number=2, serialized_options=None, type=None), _descriptor.EnumValueDescriptor( name='WIFI_UNAVAILABLE', index=3, number=3, serialized_options=None, type=None), ], containing_type=None, serialized_options=None, serialized_start=1376, serialized_end=1458, ) _sym_db.RegisterEnumDescriptor(_REASON) Reason = enum_type_wrapper.EnumTypeWrapper(_REASON) UNKNOWN = 0 RECONNECT = 1 POWER_DISCONNECTED = 2 WIFI_UNAVAILABLE = 3 _PARAMETERS = 
_descriptor.Descriptor( name='Parameters', full_name='flower.transport.Parameters', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='tensors', full_name='flower.transport.Parameters.tensors', index=0, number=1, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='tensor_type', full_name='flower.transport.Parameters.tensor_type', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=50, serialized_end=100, ) _SERVERMESSAGE_RECONNECT = _descriptor.Descriptor( name='Reconnect', full_name='flower.transport.ServerMessage.Reconnect', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='seconds', full_name='flower.transport.ServerMessage.Reconnect.seconds', index=0, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=385, serialized_end=413, ) _SERVERMESSAGE_GETPARAMETERS = _descriptor.Descriptor( name='GetParameters', full_name='flower.transport.ServerMessage.GetParameters', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], 
serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=415, serialized_end=430, ) _SERVERMESSAGE_FITINS_CONFIGENTRY = _descriptor.Descriptor( name='ConfigEntry', full_name='flower.transport.ServerMessage.FitIns.ConfigEntry', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='key', full_name='flower.transport.ServerMessage.FitIns.ConfigEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='flower.transport.ServerMessage.FitIns.ConfigEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'8\001', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=561, serialized_end=606, ) _SERVERMESSAGE_FITINS = _descriptor.Descriptor( name='FitIns', full_name='flower.transport.ServerMessage.FitIns', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='parameters', full_name='flower.transport.ServerMessage.FitIns.parameters', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='config', full_name='flower.transport.ServerMessage.FitIns.config', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_SERVERMESSAGE_FITINS_CONFIGENTRY, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=433, serialized_end=606, ) _SERVERMESSAGE_EVALUATEINS_CONFIGENTRY = _descriptor.Descriptor( name='ConfigEntry', full_name='flower.transport.ServerMessage.EvaluateIns.ConfigEntry', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='key', full_name='flower.transport.ServerMessage.EvaluateIns.ConfigEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='flower.transport.ServerMessage.EvaluateIns.ConfigEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'8\001', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=561, serialized_end=606, ) _SERVERMESSAGE_EVALUATEINS = _descriptor.Descriptor( name='EvaluateIns', full_name='flower.transport.ServerMessage.EvaluateIns', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='parameters', full_name='flower.transport.ServerMessage.EvaluateIns.parameters', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='config', full_name='flower.transport.ServerMessage.EvaluateIns.config', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_SERVERMESSAGE_EVALUATEINS_CONFIGENTRY, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=609, serialized_end=792, ) _SERVERMESSAGE = _descriptor.Descriptor( name='ServerMessage', full_name='flower.transport.ServerMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='reconnect', full_name='flower.transport.ServerMessage.reconnect', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='get_parameters', full_name='flower.transport.ServerMessage.get_parameters', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='fit_ins', full_name='flower.transport.ServerMessage.fit_ins', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='evaluate_ins', full_name='flower.transport.ServerMessage.evaluate_ins', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, 
default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_SERVERMESSAGE_RECONNECT, _SERVERMESSAGE_GETPARAMETERS, _SERVERMESSAGE_FITINS, _SERVERMESSAGE_EVALUATEINS, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='msg', full_name='flower.transport.ServerMessage.msg', index=0, containing_type=None, fields=[]), ], serialized_start=103, serialized_end=799, ) _CLIENTMESSAGE_DISCONNECT = _descriptor.Descriptor( name='Disconnect', full_name='flower.transport.ClientMessage.Disconnect', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='reason', full_name='flower.transport.ClientMessage.Disconnect.reason', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1086, serialized_end=1140, ) _CLIENTMESSAGE_PARAMETERSRES = _descriptor.Descriptor( name='ParametersRes', full_name='flower.transport.ClientMessage.ParametersRes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='parameters', full_name='flower.transport.ClientMessage.ParametersRes.parameters', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', 
extension_ranges=[], oneofs=[ ], serialized_start=1142, serialized_end=1207, ) _CLIENTMESSAGE_FITRES = _descriptor.Descriptor( name='FitRes', full_name='flower.transport.ClientMessage.FitRes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='parameters', full_name='flower.transport.ClientMessage.FitRes.parameters', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='num_examples', full_name='flower.transport.ClientMessage.FitRes.num_examples', index=1, number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='num_examples_ceil', full_name='flower.transport.ClientMessage.FitRes.num_examples_ceil', index=2, number=3, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1209, serialized_end=1316, ) _CLIENTMESSAGE_EVALUATERES = _descriptor.Descriptor( name='EvaluateRes', full_name='flower.transport.ClientMessage.EvaluateRes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='num_examples', full_name='flower.transport.ClientMessage.EvaluateRes.num_examples', index=0, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='loss', full_name='flower.transport.ClientMessage.EvaluateRes.loss', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1318, serialized_end=1367, ) _CLIENTMESSAGE = _descriptor.Descriptor( name='ClientMessage', full_name='flower.transport.ClientMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='disconnect', full_name='flower.transport.ClientMessage.disconnect', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='parameters_res', full_name='flower.transport.ClientMessage.parameters_res', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='fit_res', full_name='flower.transport.ClientMessage.fit_res', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='evaluate_res', full_name='flower.transport.ClientMessage.evaluate_res', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[_CLIENTMESSAGE_DISCONNECT, _CLIENTMESSAGE_PARAMETERSRES, _CLIENTMESSAGE_FITRES, _CLIENTMESSAGE_EVALUATERES, ], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='msg', full_name='flower.transport.ClientMessage.msg', index=0, containing_type=None, fields=[]), ], serialized_start=802, serialized_end=1374, ) _SERVERMESSAGE_RECONNECT.containing_type = _SERVERMESSAGE _SERVERMESSAGE_GETPARAMETERS.containing_type = _SERVERMESSAGE _SERVERMESSAGE_FITINS_CONFIGENTRY.containing_type = _SERVERMESSAGE_FITINS _SERVERMESSAGE_FITINS.fields_by_name['parameters'].message_type = _PARAMETERS _SERVERMESSAGE_FITINS.fields_by_name['config'].message_type = _SERVERMESSAGE_FITINS_CONFIGENTRY _SERVERMESSAGE_FITINS.containing_type = _SERVERMESSAGE _SERVERMESSAGE_EVALUATEINS_CONFIGENTRY.containing_type = _SERVERMESSAGE_EVALUATEINS _SERVERMESSAGE_EVALUATEINS.fields_by_name['parameters'].message_type = _PARAMETERS _SERVERMESSAGE_EVALUATEINS.fields_by_name['config'].message_type = _SERVERMESSAGE_EVALUATEINS_CONFIGENTRY _SERVERMESSAGE_EVALUATEINS.containing_type = _SERVERMESSAGE _SERVERMESSAGE.fields_by_name['reconnect'].message_type = _SERVERMESSAGE_RECONNECT _SERVERMESSAGE.fields_by_name['get_parameters'].message_type = _SERVERMESSAGE_GETPARAMETERS _SERVERMESSAGE.fields_by_name['fit_ins'].message_type = _SERVERMESSAGE_FITINS _SERVERMESSAGE.fields_by_name['evaluate_ins'].message_type = _SERVERMESSAGE_EVALUATEINS _SERVERMESSAGE.oneofs_by_name['msg'].fields.append( _SERVERMESSAGE.fields_by_name['reconnect']) _SERVERMESSAGE.fields_by_name['reconnect'].containing_oneof = _SERVERMESSAGE.oneofs_by_name['msg'] _SERVERMESSAGE.oneofs_by_name['msg'].fields.append( _SERVERMESSAGE.fields_by_name['get_parameters']) 
_SERVERMESSAGE.fields_by_name['get_parameters'].containing_oneof = _SERVERMESSAGE.oneofs_by_name['msg'] _SERVERMESSAGE.oneofs_by_name['msg'].fields.append( _SERVERMESSAGE.fields_by_name['fit_ins']) _SERVERMESSAGE.fields_by_name['fit_ins'].containing_oneof = _SERVERMESSAGE.oneofs_by_name['msg'] _SERVERMESSAGE.oneofs_by_name['msg'].fields.append( _SERVERMESSAGE.fields_by_name['evaluate_ins']) _SERVERMESSAGE.fields_by_name['evaluate_ins'].containing_oneof = _SERVERMESSAGE.oneofs_by_name['msg'] _CLIENTMESSAGE_DISCONNECT.fields_by_name['reason'].enum_type = _REASON _CLIENTMESSAGE_DISCONNECT.containing_type = _CLIENTMESSAGE _CLIENTMESSAGE_PARAMETERSRES.fields_by_name['parameters'].message_type = _PARAMETERS _CLIENTMESSAGE_PARAMETERSRES.containing_type = _CLIENTMESSAGE _CLIENTMESSAGE_FITRES.fields_by_name['parameters'].message_type = _PARAMETERS _CLIENTMESSAGE_FITRES.containing_type = _CLIENTMESSAGE _CLIENTMESSAGE_EVALUATERES.containing_type = _CLIENTMESSAGE _CLIENTMESSAGE.fields_by_name['disconnect'].message_type = _CLIENTMESSAGE_DISCONNECT _CLIENTMESSAGE.fields_by_name['parameters_res'].message_type = _CLIENTMESSAGE_PARAMETERSRES _CLIENTMESSAGE.fields_by_name['fit_res'].message_type = _CLIENTMESSAGE_FITRES _CLIENTMESSAGE.fields_by_name['evaluate_res'].message_type = _CLIENTMESSAGE_EVALUATERES _CLIENTMESSAGE.oneofs_by_name['msg'].fields.append( _CLIENTMESSAGE.fields_by_name['disconnect']) _CLIENTMESSAGE.fields_by_name['disconnect'].containing_oneof = _CLIENTMESSAGE.oneofs_by_name['msg'] _CLIENTMESSAGE.oneofs_by_name['msg'].fields.append( _CLIENTMESSAGE.fields_by_name['parameters_res']) _CLIENTMESSAGE.fields_by_name['parameters_res'].containing_oneof = _CLIENTMESSAGE.oneofs_by_name['msg'] _CLIENTMESSAGE.oneofs_by_name['msg'].fields.append( _CLIENTMESSAGE.fields_by_name['fit_res']) _CLIENTMESSAGE.fields_by_name['fit_res'].containing_oneof = _CLIENTMESSAGE.oneofs_by_name['msg'] _CLIENTMESSAGE.oneofs_by_name['msg'].fields.append( 
_CLIENTMESSAGE.fields_by_name['evaluate_res']) _CLIENTMESSAGE.fields_by_name['evaluate_res'].containing_oneof = _CLIENTMESSAGE.oneofs_by_name['msg'] DESCRIPTOR.message_types_by_name['Parameters'] = _PARAMETERS DESCRIPTOR.message_types_by_name['ServerMessage'] = _SERVERMESSAGE DESCRIPTOR.message_types_by_name['ClientMessage'] = _CLIENTMESSAGE DESCRIPTOR.enum_types_by_name['Reason'] = _REASON _sym_db.RegisterFileDescriptor(DESCRIPTOR) Parameters = _reflection.GeneratedProtocolMessageType('Parameters', (_message.Message,), { 'DESCRIPTOR' : _PARAMETERS, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.Parameters) }) _sym_db.RegisterMessage(Parameters) ServerMessage = _reflection.GeneratedProtocolMessageType('ServerMessage', (_message.Message,), { 'Reconnect' : _reflection.GeneratedProtocolMessageType('Reconnect', (_message.Message,), { 'DESCRIPTOR' : _SERVERMESSAGE_RECONNECT, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ServerMessage.Reconnect) }) , 'GetParameters' : _reflection.GeneratedProtocolMessageType('GetParameters', (_message.Message,), { 'DESCRIPTOR' : _SERVERMESSAGE_GETPARAMETERS, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ServerMessage.GetParameters) }) , 'FitIns' : _reflection.GeneratedProtocolMessageType('FitIns', (_message.Message,), { 'ConfigEntry' : _reflection.GeneratedProtocolMessageType('ConfigEntry', (_message.Message,), { 'DESCRIPTOR' : _SERVERMESSAGE_FITINS_CONFIGENTRY, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ServerMessage.FitIns.ConfigEntry) }) , 'DESCRIPTOR' : _SERVERMESSAGE_FITINS, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ServerMessage.FitIns) }) , 'EvaluateIns' : _reflection.GeneratedProtocolMessageType('EvaluateIns', (_message.Message,), { 'ConfigEntry' : 
_reflection.GeneratedProtocolMessageType('ConfigEntry', (_message.Message,), { 'DESCRIPTOR' : _SERVERMESSAGE_EVALUATEINS_CONFIGENTRY, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ServerMessage.EvaluateIns.ConfigEntry) }) , 'DESCRIPTOR' : _SERVERMESSAGE_EVALUATEINS, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ServerMessage.EvaluateIns) }) , 'DESCRIPTOR' : _SERVERMESSAGE, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ServerMessage) }) _sym_db.RegisterMessage(ServerMessage) _sym_db.RegisterMessage(ServerMessage.Reconnect) _sym_db.RegisterMessage(ServerMessage.GetParameters) _sym_db.RegisterMessage(ServerMessage.FitIns) _sym_db.RegisterMessage(ServerMessage.FitIns.ConfigEntry) _sym_db.RegisterMessage(ServerMessage.EvaluateIns) _sym_db.RegisterMessage(ServerMessage.EvaluateIns.ConfigEntry) ClientMessage = _reflection.GeneratedProtocolMessageType('ClientMessage', (_message.Message,), { 'Disconnect' : _reflection.GeneratedProtocolMessageType('Disconnect', (_message.Message,), { 'DESCRIPTOR' : _CLIENTMESSAGE_DISCONNECT, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ClientMessage.Disconnect) }) , 'ParametersRes' : _reflection.GeneratedProtocolMessageType('ParametersRes', (_message.Message,), { 'DESCRIPTOR' : _CLIENTMESSAGE_PARAMETERSRES, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ClientMessage.ParametersRes) }) , 'FitRes' : _reflection.GeneratedProtocolMessageType('FitRes', (_message.Message,), { 'DESCRIPTOR' : _CLIENTMESSAGE_FITRES, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ClientMessage.FitRes) }) , 'EvaluateRes' : _reflection.GeneratedProtocolMessageType('EvaluateRes', (_message.Message,), { 'DESCRIPTOR' : _CLIENTMESSAGE_EVALUATERES, 
'__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ClientMessage.EvaluateRes) }) , 'DESCRIPTOR' : _CLIENTMESSAGE, '__module__' : 'flower.proto.transport_pb2' # @@protoc_insertion_point(class_scope:flower.transport.ClientMessage) }) _sym_db.RegisterMessage(ClientMessage) _sym_db.RegisterMessage(ClientMessage.Disconnect) _sym_db.RegisterMessage(ClientMessage.ParametersRes) _sym_db.RegisterMessage(ClientMessage.FitRes) _sym_db.RegisterMessage(ClientMessage.EvaluateRes) _SERVERMESSAGE_FITINS_CONFIGENTRY._options = None _SERVERMESSAGE_EVALUATEINS_CONFIGENTRY._options = None _FLOWERSERVICE = _descriptor.ServiceDescriptor( name='FlowerService', full_name='flower.transport.FlowerService', file=DESCRIPTOR, index=0, serialized_options=None, serialized_start=1460, serialized_end=1555, methods=[ _descriptor.MethodDescriptor( name='Join', full_name='flower.transport.FlowerService.Join', index=0, containing_service=None, input_type=_CLIENTMESSAGE, output_type=_SERVERMESSAGE, serialized_options=None, ), ]) _sym_db.RegisterServiceDescriptor(_FLOWERSERVICE) DESCRIPTOR.services_by_name['FlowerService'] = _FLOWERSERVICE # @@protoc_insertion_point(module_scope)
39.494565
2,507
0.762901
3,395
29,068
6.218851
0.074816
0.037134
0.044759
0.047933
0.722777
0.663335
0.595036
0.577275
0.532516
0.527969
0
0.032427
0.115213
29,068
735
2,508
39.548299
0.788483
0.041695
0
0.631501
1
0.001486
0.224665
0.170061
0
0
0
0
0
1
0
false
0
0.007429
0
0.007429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c382afee49a8dcf277085e3abd2845bbc944eef7
5,838
py
Python
equatation.py
asteinig4018/mhacks19
64b5ba211f38f3b30927bf926073b777fb23a46f
[ "MIT" ]
1
2019-10-12T21:30:15.000Z
2019-10-12T21:30:15.000Z
equatation.py
asteinig4018/mhacks19
64b5ba211f38f3b30927bf926073b777fb23a46f
[ "MIT" ]
null
null
null
equatation.py
asteinig4018/mhacks19
64b5ba211f38f3b30927bf926073b777fb23a46f
[ "MIT" ]
null
null
null
import json
import math
from HistoricalTweetDataFetcher import getHistoricalData

# ---------------------------------------------------------------------------
# Tweet sentiment aggregation.
# Each record presumably carries: "s" sentiment, "m" magnitude, "p" party tag
# ('l', 'c' or 'n') and "h" hour — TODO confirm against the fetcher.
# ---------------------------------------------------------------------------
joelsarray = getHistoricalData(0)

arrs = [dictionary["s"] for dictionary in joelsarray]  # sentiment values
arrm = [dictionary["m"] for dictionary in joelsarray]  # magnitude values
arrp = [dictionary["p"] for dictionary in joelsarray]  # party tags

time = joelsarray[0]["h"]  # hour of the first record

lcount = arrp.count('l')
ccount = arrp.count('c')
ncount = arrp.count('n')

arrsAvg = sum(arrs) / len(arrs)  # average sentiment value
arrmAvg = sum(arrm) / len(arrm)  # average magnitude value

# ---------------------------------------------------------------------------
# Weather: a temperature term per state, folded into one number.
# ---------------------------------------------------------------------------
filename2 = "weather_us.json"
if filename2:
    with open(filename2, 'r') as f:
        weatherstore = json.load(f)

statearray = list(weatherstore.keys())

# NOTE: the original code added each state's term 50 times and divided by 50
# once at the end, which is algebraically the plain SUM over states (not an
# average, despite its comment). The dead `for x in range(50)` loops are
# removed here; the resulting value is unchanged.
statesAverage = 0
for state in statearray:
    temptemp = float(weatherstore[state]["temperature"])
    temphigh = float(weatherstore[state]["average_monthly_high"])
    templow = float(weatherstore[state]["average_monthly_low"])
    statesAverage += ((temptemp - temphigh) * (templow - temptemp)) / (math.pow((temphigh + templow) / 2, 2))
print(statesAverage)

# ---------------------------------------------------------------------------
# S&P 500: percent change, with small moves treated as noise.
# ---------------------------------------------------------------------------
filename3 = "sp500_price.json"
if filename3:
    with open(filename3, 'r') as f:
        stockdata = json.load(f)

stockpricecurrent = stockdata["current_price"]
stockpricechange = stockdata["percent_change"]  # percent change of S&P500

if -0.73 <= stockpricechange <= 0.73:
    stockmultiply = 0  # inside the noise band: no effect
else:
    stockmultiply = stockpricechange * 0.5 * 0.73

print(stockpricechange)

# ---------------------------------------------------------------------------
# Presidential approval rating.
# ---------------------------------------------------------------------------
filename4 = "trump_approval_rating.json"
if filename4:
    with open(filename4, 'r') as f:
        approvalratingdata = json.load(f)

approveAvg = approvalratingdata["approve_avg"]   # historical approval average
currentApproval = approvalratingdata["approve"]  # current approval percentage


def equation(sentiment, stockmultiply, pollcurrent, pollaverage, avgtemp,
             lvalue, cvalue, ltweets, ctweet, time,
             const1=70, const2=60, const3=50, const4=45, const5=25,
             slideInput=True):
    """Combine the signals into a single score and slide it into data.json.

    The chart in data.json keeps a 7-slot rolling window: every call shifts
    the `labels` and `thisWeek` series left by one slot and writes the new
    hour label and score into the last slot.

    Args:
        sentiment, stockmultiply, pollcurrent, pollaverage, avgtemp:
            Aggregated input signals (see module-level computation).
        lvalue, cvalue, ltweets, ctweet: party weights / tweet counts.
        time: hour used for the new chart label.
        const1..const5: weighting constants for each signal.
        slideInput: kept for interface compatibility; both branches of the
            original test performed the identical update, so it no longer
            changes behavior.

    Returns:
        The computed score (`point`).
    """
    # NOTE(review): `+ ctweet` is kept from the original, but by symmetry
    # with the `const5/2 * lvalue * ltweets` term it may have been meant to
    # be `const5 / 2 * cvalue * ctweet` — confirm with the authors.
    point = (const1 * sentiment
             + const2 * stockmultiply
             + const3 * ((pollcurrent - pollaverage) / pollaverage)
             + const4 * avgtemp
             + const5 / 2 * lvalue * ltweets
             + const5 / 2 * cvalue + ctweet + const5)

    # The original duplicated this entire block byte-for-byte in both
    # branches of `if slideInput == True`; collapsed into a single path.
    filename5 = "data.json"
    if filename5:
        with open(filename5, 'r') as f:
            outputdata = json.load(f)
        print(outputdata)
        for series_name in ("labels", "thisWeek"):
            series = outputdata["chartData"][series_name]
            # Shift the 7-slot window left by one.
            for i in range(6):
                series[i] = series[i + 1]
        outputdata["chartData"]["labels"][6] = str(time) + ":00"
        outputdata["chartData"]["thisWeek"][6] = point
        with open(filename5, 'w') as f:
            json.dump(outputdata, f)

    return point


my_list = equation(arrsAvg, stockmultiply, currentApproval, approveAvg,
                   statesAverage, 0, 0, lcount, ccount, 17,
                   70, 60, 50, 45, 25)
39.714286
198
0.604488
589
5,838
5.971138
0.2309
0.280921
0.184817
0.029571
0.495309
0.467444
0.467444
0.467444
0.467444
0.467444
0
0.030517
0.174889
5,838
146
199
39.986301
0.699606
0.039054
0
0.411215
0
0
0.197736
0.005075
0
0
0
0
0
1
0.009346
false
0
0.028037
0
0.046729
0.037383
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
5ee2508b1563859bc37a102d678ee13eb3c4fb40
3,496
py
Python
py_include/__init__.py
mauro-balades/py-include
d2d3b90fc840a550a49d76798bc8a97b076480d8
[ "MIT" ]
2
2022-01-30T13:23:22.000Z
2022-01-31T10:23:46.000Z
py_include/__init__.py
mauro-balades/py-include
d2d3b90fc840a550a49d76798bc8a97b076480d8
[ "MIT" ]
null
null
null
py_include/__init__.py
mauro-balades/py-include
d2d3b90fc840a550a49d76798bc8a97b076480d8
[ "MIT" ]
null
null
null
#!/usr/bin/python3
"""
| --------------------- Py include <Mauro Baladés> ---------------------
|
| Execute other Python source files as if they had been imported, without
| requiring packages (`__init__.py`) or dotted import paths.
|
| ----------------------------------------------------------------------
| MIT License
|
| Copyright (c) 2022 Mauro Baladés
|
| Permission is hereby granted, free of charge, to any person obtaining a copy
| of this software and associated documentation files (the "Software"), to deal
| in the Software without restriction, including without limitation the rights
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
| copies of the Software, and to permit persons to whom the Software is
| furnished to do so, subject to the following conditions:
|
| The above copyright notice and this permission notice shall be included in all
| copies or substantial portions of the Software.
|
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
| SOFTWARE.
|
"""

from pathlib import Path
import sys  # NOTE(review): imported but unused in this module — confirm before removing.


def _exec_modules(*args, **kwargs):
    """Execute each file in *args inside the caller's namespace.

    Requires a `local` keyword argument: the caller's `locals()` dict,
    which receives the names defined by the executed files.
    """
    # Get locals from kwargs.
    local = kwargs.get("local", None)

    # Check if local is None,
    # because user did not define it.
    if local is None:
        raise Exception("Need to pass the local variable")

    # Iterate every path that user gives as
    # arguments (stored in *args).
    for arg in args:
        # Store the path into a
        # platform specific-path
        path = Path(arg)

        # Open the file and get its
        # content
        with open(path, "r") as f:
            data = f.read()

        # Execute the file content.
        exec(data, globals(), local)


def _ret_modules(*args, **kwargs):
    # NOTE(review): unimplemented stub — the `ret=True` path of include()
    # currently returns None.
    pass


def include(*args, **kwargs):
    """Execute the given Python files as if they had been imported.

    This function takes any number of file paths whose contents are
    executed so that it feels like the user imported them.

    Note:
        It can also be used to store the module in a variable if the user
        needs it. This can be done by setting the keyword argument `ret`
        to True (more detail in #Args).

    Note:
        For the regular import statement you need an `__init__.py` and
        paths separated by dots. With py-include, you don't. Py-include
        makes your path platform-appropriate, opens the file's content and
        executes it, so you don't need a path divided by `.` or an
        `__init__.py`.

    Args:
        files [list(str)]: A list of paths to include.
        ret [bool]: If set to True, return the module (defaults to False).

    Note:
        If `ret` is set to `True`, the function will return all modules and
        the user will need to unpack them.
    """
    # Whether the user wants the modules returned rather than
    # executed (defaults to False).
    ret = kwargs.get("ret", False)

    # If `ret` is falsy, open each file and execute its content;
    # otherwise return the modules the user wanted to import.
    if not ret:
        _exec_modules(*args, **kwargs)

    return _ret_modules(*args, **kwargs)
33.295238
80
0.65246
510
3,496
4.370588
0.405882
0.03948
0.030507
0.018843
0
0
0
0
0
0
0
0.001863
0.232265
3,496
104
81
33.615385
0.828614
0.810069
0
0
0
0
0.06734
0
0
0
0
0
0
1
0.166667
false
0.111111
0.111111
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
5ee68ea9e8a99cf09e7e6d0ca6ce334ef983ef7f
4,724
py
Python
model_compression_toolkit/common/graph/graph_matchers.py
eladc-git/model_optimization
46d1c893ca23e61d8ef7597184ad2ba6e2ae6e7a
[ "Apache-2.0" ]
null
null
null
model_compression_toolkit/common/graph/graph_matchers.py
eladc-git/model_optimization
46d1c893ca23e61d8ef7597184ad2ba6e2ae6e7a
[ "Apache-2.0" ]
null
null
null
model_compression_toolkit/common/graph/graph_matchers.py
eladc-git/model_optimization
46d1c893ca23e61d8ef7597184ad2ba6e2ae6e7a
[ "Apache-2.0" ]
null
null
null
# Copyright 2021 Sony Semiconductors Israel, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from typing import Any, List

from model_compression_toolkit.common.graph.base_node import BaseNode
from model_compression_toolkit.common.matchers import node_matcher, walk_matcher, edge_matcher


class NodeOperationMatcher(node_matcher.BaseNodeMatcher):
    """
    Matches a node whose layer class equals a specific operation.
    """

    def __init__(self, operation: Any):
        """
        Init for class NodeOperationMatcher.

        Args:
            operation: Layer class to check nodes against.
        """
        self.operation = operation

    def apply(self, input_node_object: Any) -> bool:
        """
        Check if input_node_object matches the matcher condition.

        Args:
            input_node_object: Node object to check the matcher on.

        Returns:
            True if the node's type is the operation this matcher holds,
            False otherwise. (The original returned True or an implicit
            None; returning a real bool matches the declared annotation
            while preserving truthiness.)
        """
        return input_node_object.type == self.operation


class NodeFrameworkAttrMatcher(node_matcher.BaseNodeMatcher):
    """
    Matches a node that has a given framework attribute with a given value.
    """

    def __init__(self, attr_name: str, attr_value: Any):
        """
        Init a NodeFrameworkAttrMatcher object.

        Args:
            attr_name: Name of the node attribute to check.
            attr_value: Value the attribute must equal.
        """
        self.attr_name = attr_name
        self.attr_value = attr_value

    def apply(self, input_node_object: Any) -> bool:
        """
        Check whether input_node_object carries the configured attribute
        with the configured value.

        Args:
            input_node_object: Node object to check.

        Returns:
            True if the attribute is present and equal to the configured
            value, False otherwise (was True/None in the original;
            truthiness unchanged).
        """
        return (self.attr_name in input_node_object.framework_attr
                and input_node_object.framework_attr[self.attr_name] == self.attr_value)


class EdgeMatcher(edge_matcher.BaseEdgeMatcher):
    """
    Matches a directed edge (source node, target node) pair.
    """

    def __init__(self, source_matcher: BaseNode, target_matcher: BaseNode):
        """
        Init an EdgeMatcher object.

        Args:
            source_matcher: Matcher for the edge's source node.
            target_matcher: Matcher for the edge's destination node.
        """
        super().__init__(source_matcher, target_matcher)

    def apply(self, input_object: Any) -> bool:
        """
        Check whether input_object is a tuple whose first two elements
        satisfy the source and target matchers respectively.

        Args:
            input_object: Candidate edge to check.

        Returns:
            True if the tuple's first two elements match; False otherwise.
        """
        if isinstance(input_object, tuple) and len(input_object) >= 2:
            return self.source_matcher.apply(input_object[0]) and self.target_matcher.apply(input_object[1])
        return False


class WalkMatcher(walk_matcher.WalkMatcherList):
    """
    Matches a list of nodes against a configured node list.
    """

    def __init__(self, matcher_list: List[BaseNode]):
        """
        Init a WalkMatcher object.

        Args:
            matcher_list: List of nodes to hold for checking.
        """
        super().__init__(matcher_list)

    def apply(self, input_object: Any) -> bool:  # not in use
        """
        Unimplemented; kept for interface compatibility.

        Args:
            input_object: Object to check.
        """
        pass  # pragma: no cover
31.704698
108
0.650296
583
4,724
5.113208
0.262436
0.040255
0.050319
0.028514
0.247903
0.146595
0.123784
0.088561
0.037571
0.037571
0
0.003188
0.269687
4,724
148
109
31.918919
0.86087
0.541702
0
0.2
0
0
0
0
0
0
0
0
0
1
0.266667
false
0.033333
0.1
0
0.633333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
5eeb79ff59fe8c898948a4d629f95025dddf840d
1,843
py
Python
gQuant/plugins/hrp_plugin/greenflow_hrp_plugin/__init__.py
t-triobox/gQuant
6ee3ba104ce4c6f17a5755e7782298902d125563
[ "Apache-2.0" ]
null
null
null
gQuant/plugins/hrp_plugin/greenflow_hrp_plugin/__init__.py
t-triobox/gQuant
6ee3ba104ce4c6f17a5755e7782298902d125563
[ "Apache-2.0" ]
null
null
null
gQuant/plugins/hrp_plugin/greenflow_hrp_plugin/__init__.py
t-triobox/gQuant
6ee3ba104ce4c6f17a5755e7782298902d125563
[ "Apache-2.0" ]
null
null
null
""" //////////////////////////////////////////////////////////////////////////// // // Copyright (C) NVIDIA Corporation. All rights reserved. // // NVIDIA Sample Code // // Please refer to the NVIDIA end user license agreement (EULA) associated // with this source code for terms and conditions that govern your use of // this software. Any use, reproduction, disclosure, or distribution of // this software and related documentation outside the terms of the EULA // is strictly prohibited. // //////////////////////////////////////////////////////////////////////////// """ from .loadCsvNode import LoadCsvNode from .bootstrapNode import BootstrapNode from .logReturnNode import LogReturnNode from .distanceNode import DistanceNode from .hierarchicalClusteringNode import HierarchicalClusteringNode from .hrpWeight import HRPWeightNode from .portfolioNode import PortfolioNode from .performanceMetricNode import PerformanceMetricNode from .nrpWeightNode import NRPWeightNode from .maxDrawdownNode import MaxDrawdownNode from .featureNode import FeatureNode from .aggregateTimeFeature import AggregateTimeFeatureNode from .mergeNode import MergeNode from .diffNode import DiffNode from .rSquaredNode import RSquaredNode from .shapSummaryPlotNode import ShapSummaryPlotPlotNode from .leverageNode import LeverageNode from .rawDataNode import RawDataNode from .transactionCostNode import TransactionCostNode __all__ = ["LoadCsvNode", "BootstrapNode", "LogReturnNode", "DistanceNode", "HierarchicalClusteringNode", "HRPWeightNode", "PortfolioNode", "PerformanceMetricNode", "NRPWeightNode", "MaxDrawdownNode", "FeatureNode", "AggregateTimeFeatureNode", "MergeNode", "DiffNode", "RSquaredNode", "ShapSummaryPlotPlotNode", "LeverageNode", "RawDataNode", "TransactionCostNode"]
42.860465
78
0.72382
153
1,843
8.69281
0.437909
0.009023
0.021053
0
0
0
0
0
0
0
0
0
0.130222
1,843
42
79
43.880952
0.829694
0.307651
0
0
0
0
0.222311
0.0749
0
0
0
0
0
1
0
false
0
0.76
0
0.76
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
5eefaff8065c5ecea5f5a36834a9168d04d5bd4e
2,403
py
Python
screenblankmgr.py
nsw42/pijuui
fee9b96ceaab6b4f5db7ea41cb86da9a1a3b9eac
[ "BSD-2-Clause" ]
1
2021-12-20T15:02:54.000Z
2021-12-20T15:02:54.000Z
screenblankmgr.py
nsw42/pijuui
fee9b96ceaab6b4f5db7ea41cb86da9a1a3b9eac
[ "BSD-2-Clause" ]
null
null
null
screenblankmgr.py
nsw42/pijuui
fee9b96ceaab6b4f5db7ea41cb86da9a1a3b9eac
[ "BSD-2-Clause" ]
null
null
null
import logging
import subprocess


class PlayingState:
    """Enumeration of playback states tracked by ScreenBlankMgr."""
    Inactive = 0
    Active = 1


class ProfileBase:
    """Base class for screen-blanking profiles.

    Subclasses decide how the X screen-saver timeout reacts to playback
    events. Timeouts are applied by shelling out to `xset s`.
    """

    def __init__(self):
        raise NotImplementedError()

    def on_start_playing(self):
        """Called once when playback starts."""
        raise NotImplementedError()

    def on_stop_playing(self):
        """Called once when playback pauses or stops."""
        raise NotImplementedError()

    def on_playing_tick(self):
        """Called periodically while playback continues."""
        raise NotImplementedError()

    def _set_timeout(self, timeout):
        # `xset s <seconds>` sets the screen-saver idle timeout.
        self._run_xset(str(timeout))

    def _run_xset(self, s_arg):
        cmd = ['xset', 's', s_arg]
        logging.debug(cmd)
        subprocess.run(cmd)


class ScreenBlankProfileNone(ProfileBase):
    """Profile that never touches the screen-saver settings."""

    def __init__(self):
        pass

    def on_start_playing(self):
        pass

    def on_stop_playing(self):
        pass

    def on_playing_tick(self):
        pass


class ScreenBlankProfileBalanced(ProfileBase):
    """Long timeout (5 min) while playing, short (30 s) when idle."""

    def __init__(self):
        pass

    def on_start_playing(self):
        # BUG FIX: the original called self._set_timeout(self, 300), passing
        # `self` twice and raising TypeError whenever playback started.
        self._set_timeout(300)

    def on_stop_playing(self):
        # BUG FIX: same double-self error as above (was self._set_timeout(self, 30)).
        self._set_timeout(30)

    def on_playing_tick(self):
        pass


class ScreenBlankProfileOnWhenPlaying(ProfileBase):
    """Keep the screen on while playing; blank quickly when stopped."""

    def __init__(self):
        pass

    def on_start_playing(self):
        self._set_timeout(60 * 60)

    def on_stop_playing(self):
        self._run_xset('on')
        self._set_timeout(10)

    def on_playing_tick(self):
        # Force the screen on and reset the idle timer on every tick.
        self._run_xset('off')
        self._run_xset('reset')


class ScreenBlankMgr:
    """Drives a blanking profile from player state changes.

    set_state() is expected to be called regularly; every 5 consecutive
    calls while playing, the profile's on_playing_tick() is invoked.
    """

    def __init__(self, profile: ProfileBase):
        self.state = None  # PlayingState value, or None before first update
        self.profile = profile
        self.tick_countdown = 5

    def set_state(self, new_state: str):
        """
        new_state in ('playing', 'paused', 'stopped')
        """
        new_state = PlayingState.Active if (new_state == 'playing') else PlayingState.Inactive
        if self.state == new_state:
            # No transition: while playing, fire a periodic tick every 5 calls.
            if self.state == PlayingState.Active:
                self.tick_countdown -= 1
                if self.tick_countdown <= 0:
                    self.profile.on_playing_tick()
                    self.tick_countdown = 5
        else:
            self.state = new_state
            if self.state == PlayingState.Active:
                self.profile.on_start_playing()
            else:
                self.profile.on_stop_playing()


profiles = {
    'none': ScreenBlankProfileNone(),
    'balanced': ScreenBlankProfileBalanced(),
    'onoff': ScreenBlankProfileOnWhenPlaying()
}
22.669811
94
0.615481
263
2,403
5.307985
0.205323
0.04298
0.039398
0.060888
0.41404
0.338825
0.234241
0.192693
0.192693
0.192693
0
0.009959
0.289638
2,403
105
95
22.885714
0.80785
0.018727
0
0.452055
0
0
0.01671
0
0
0
0
0
0
1
0.273973
false
0.09589
0.027397
0
0.410959
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
5ef50480947622fa6c85f38cc28d083417268f20
351
py
Python
apps/snippet/admin.py
AniPython/ani
2536ac9ddae2b8396b634f982fb1083339b4a389
[ "MIT" ]
null
null
null
apps/snippet/admin.py
AniPython/ani
2536ac9ddae2b8396b634f982fb1083339b4a389
[ "MIT" ]
null
null
null
apps/snippet/admin.py
AniPython/ani
2536ac9ddae2b8396b634f982fb1083339b4a389
[ "MIT" ]
null
null
null
from django.contrib import admin

from .models import Tag, Article


@admin.register(Tag)
class TagAdmin(admin.ModelAdmin):
    """Admin configuration for the Tag model."""

    # Columns shown in the changelist; 'order' is editable inline in the list.
    list_display = ('name', 'order')
    list_editable = ('order',)


@admin.register(Article)
class ArticleAdmin(admin.ModelAdmin):
    """Admin configuration for the Article model."""

    # Columns shown in the changelist.
    list_display = ['title', 'author']
    # Shown but not editable in the change form — presumably auto-managed
    # timestamps; confirm against the Article model definition.
    readonly_fields = ['create_time', 'update_time']
19.5
52
0.709402
41
351
5.926829
0.609756
0.106996
0.156379
0.213992
0
0
0
0
0
0
0
0
0.150997
351
17
53
20.647059
0.815436
0
0
0
0
0
0.13467
0
0
0
0
0
0
1
0
false
0
0.2
0
0.8
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
5ef67226c4fddb4ea740eed126e252d451b1063d
1,326
py
Python
test/functional/test_framework/script_util.py
TopoX84/newlux
555b9f7f9e4be4ef879f20083d8cf80ed8f7777e
[ "MIT" ]
1,389
2017-06-28T02:35:01.000Z
2022-03-25T20:09:01.000Z
test/functional/test_framework/script_util.py
TopoX84/newlux
555b9f7f9e4be4ef879f20083d8cf80ed8f7777e
[ "MIT" ]
1,039
2015-03-25T23:58:32.000Z
2022-03-30T00:41:16.000Z
test/functional/test_framework/script_util.py
TopoX84/newlux
555b9f7f9e4be4ef879f20083d8cf80ed8f7777e
[ "MIT" ]
564
2017-06-28T03:55:03.000Z
2022-03-30T14:57:40.000Z
#!/usr/bin/env python3
# Copyright (c) 2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Useful Script constants and utils."""
from test_framework.script import CScript

# A transaction must have a non-witness size of at least 82 bytes to escape
# the "tx-size-small" policy rule (MIN_STANDARD_TX_NONWITNESS_SIZE in
# src/policy/policy.h). The smallest possible transaction — one blank input
# (empty scriptSig) and one output with the scriptPubKey left out — adds up
# to 60 bytes:
#
#   Tx skeleton:  4 [Version] + 1 [InCount] + 1 [OutCount] + 4 [LockTime] = 10
#   Blank input: 32 [PrevTxHash] + 4 [Index] + 1 [scriptSigLen] + 4 [SeqNo] = 41
#   Output:       8 [Amount] + 1 [scriptPubKeyLen]                          =  9
#
# The single output's scriptPubKey therefore has to contribute at least 22
# more bytes — exactly the size of a P2WPKH scriptPubKey. The constant below
# is one push of 21 'a' bytes (<PUSH_21> <21 bytes of 'a'>), i.e. 22 bytes in
# total. Use it whenever a (small) fake scriptPubKey is needed while still
# meeting the minimum transaction size.
DUMMY_P2WPKH_SCRIPT = CScript([b'a' * 21])
51
84
0.737557
212
1,326
4.575472
0.584906
0.024742
0.018557
0.020619
0
0
0
0
0
0
0
0.03539
0.168929
1,326
25
85
53.04
0.844828
0.898944
0
0
0
0
0.009259
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
5eff513cdc7ff514a20abc942fb429679a31b4d7
95
py
Python
12_find the output/03_In Python/01_GeeksForGeeks/05_Set Five/problem_4.py
Magdyedwar1996/python-level-one-codes
066086672f43488bc8b32c620b5e2f94cedfe3da
[ "MIT" ]
1
2021-11-16T14:14:38.000Z
2021-11-16T14:14:38.000Z
12_find the output/03_In Python/01_GeeksForGeeks/05_Set Five/problem_4.py
Magdyedwar1996/python-level-one-codes
066086672f43488bc8b32c620b5e2f94cedfe3da
[ "MIT" ]
null
null
null
12_find the output/03_In Python/01_GeeksForGeeks/05_Set Five/problem_4.py
Magdyedwar1996/python-level-one-codes
066086672f43488bc8b32c620b5e2f94cedfe3da
[ "MIT" ]
null
null
null
# "Find the output" exercise: demonstrates Python's mutable-default-argument
# pitfall. The default `l = []` is evaluated ONCE, at function-definition
# time, so every call that omits `l` appends to the same shared list.
def gfg(x, l = []):
    # Append the squares 0..(x-1)^2 to l, then print it.
    for i in range(x):
        l.append(i*i)
    print(l)

gfg(2)           # uses the shared default  -> prints [0, 1]
gfg(3, [3, 2, 1])  # explicit list, default untouched -> prints [3, 2, 1, 0, 1, 4]
gfg(3)           # SAME default list as the first call -> prints [0, 1, 0, 1, 4]
10.555556
19
0.526316
24
95
2.083333
0.541667
0.08
0
0
0
0
0
0
0
0
0
0.077922
0.189474
95
8
20
11.875
0.571429
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.142857
0.142857
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
6f0bf095397f81c3ceab712d5eed93ca0139a752
1,319
py
Python
i_vis/core/login.py
piechottam/i-vis-core
0b90300d1ae8b96d28a80802c1300dd861ad6f4e
[ "MIT" ]
null
null
null
i_vis/core/login.py
piechottam/i-vis-core
0b90300d1ae8b96d28a80802c1300dd861ad6f4e
[ "MIT" ]
null
null
null
i_vis/core/login.py
piechottam/i-vis-core
0b90300d1ae8b96d28a80802c1300dd861ad6f4e
[ "MIT" ]
null
null
null
""" Flask LoginManager plugin. Import and execute ``login.init_app(app)`` in a factory function to use. """ from typing import Any, Callable, TYPE_CHECKING from functools import wraps from flask import redirect, request, url_for, current_app from flask_login import current_user from flask_login.login_manager import LoginManager from .errors import IllegalAccessError if TYPE_CHECKING: from werkzeug.wrappers import Response login = LoginManager() def admin_required(func: Callable) -> Callable: """Make view only accessible to admins. Args: func: Callabe to wrap. Returns: Wrapped callable - only callable when user is an admin. """ @wraps(func) def decorated_view(*args: Any, **kwargs: Any) -> Any: if not current_app.config.get("LOGIN_DISABLED", True) and ( current_user is None or not current_user.is_authenticated or not current_user.is_admin ): # TODO # move flash_permission_denied() # move return redirect(url_for("main.index")) raise IllegalAccessError return func(*args, **kwargs) return decorated_view @login.unauthorized_handler def unauthorized_callback() -> "Response": return redirect(url_for("main.signin", next=request.path))
25.862745
72
0.686884
163
1,319
5.404908
0.478528
0.049943
0.044268
0.036322
0.095346
0
0
0
0
0
0
0
0.230478
1,319
50
73
26.38
0.86798
0.244124
0
0
0
0
0.034375
0
0
0
0
0.02
0
1
0.130435
false
0
0.304348
0.043478
0.565217
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
2
6f0fd9711f448e832198d3798ba9ecf322599507
680
py
Python
src/M5_random_module.py
posguy99/comp660-fall2020
0fbf5b660fe8863bf9754b5227fe47dd03dc2291
[ "MIT" ]
null
null
null
src/M5_random_module.py
posguy99/comp660-fall2020
0fbf5b660fe8863bf9754b5227fe47dd03dc2291
[ "MIT" ]
null
null
null
src/M5_random_module.py
posguy99/comp660-fall2020
0fbf5b660fe8863bf9754b5227fe47dd03dc2291
[ "MIT" ]
null
null
null
"""Demonstrate the standard-library ``random`` module."""
import random

# random.random(): a float value >= 0.0 and < 1.0
print(random.random())
print(random.random() * 100)  # scaled: a float value >= 0.0 and < 100.0

# random.randint(a, b): an int in the *inclusive* range [a, b]
print(random.randint(1, 100))    # an int from 1 to 100
print(random.randint(101, 200))  # an int from 101 to 200
print(random.randint(0, 7))      # an int from 0 to 7

# Simulate rolling two six-sided dice.
die1 = random.randint(1, 6)
die2 = random.randint(1, 6)
print("Your roll: ", die1, die2)

# random.randrange(start, stop[, step]): like range(), stop is *excluded*
print(random.randrange(1, 100))       # an int from 1 to 99
print(random.randrange(100, 200, 2))  # an even int from 100 to 198
print(random.randrange(11, 250, 2))   # an odd int from 11 to 249
35.789474
73
0.627941
119
680
3.588235
0.310924
0.206089
0.084309
0.056206
0.149883
0.149883
0.074941
0
0
0
0
0.148148
0.245588
680
18
74
37.777778
0.684211
0.364706
0
0
0
0
0.026128
0
0
0
0
0
0
1
0
false
0
0.083333
0
0.083333
0.75
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
6f484367a2e17cf732eb810bd88c47b5caccd1c1
166
py
Python
app/src/constants.py
hubacekjirka/dailyPhotoTwitterBot
abd490b73603883d4e71bfa6076e9925a055fcb7
[ "MIT" ]
1
2020-03-16T10:51:07.000Z
2020-03-16T10:51:07.000Z
app/src/constants.py
hubacekjirka/dailyPhotoTwitterBot
abd490b73603883d4e71bfa6076e9925a055fcb7
[ "MIT" ]
6
2019-08-11T10:00:36.000Z
2021-06-02T00:18:58.000Z
app/src/constants.py
hubacekjirka/dailyPhotoTwitterBot
abd490b73603883d4e71bfa6076e9925a055fcb7
[ "MIT" ]
2
2019-09-30T18:45:47.000Z
2021-01-09T10:38:14.000Z
# Maps raw EXIF camera-model identifiers to human-friendly device names.
friendly_camera_mapping = {
    "GM1913": "Oneplus 7 Pro",
    "FC3170": "Mavic Air 2",
    # An analogue scanner in FilmNeverDie
    "SP500": "Canon AE-1 Program",
}
23.714286
41
0.638554
21
166
4.952381
1
0
0
0
0
0
0
0
0
0
0
0.109375
0.228916
166
6
42
27.666667
0.703125
0.210843
0
0
0
0
0.457364
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
6f6c2c1c13418649733376c632ea6395a15039ac
857
py
Python
medium/python3/c0108_223_rectangle-area/00_leetcode_0108.py
drunkwater/leetcode
8cc4a07763e71efbaedb523015f0c1eff2927f60
[ "Ruby" ]
null
null
null
medium/python3/c0108_223_rectangle-area/00_leetcode_0108.py
drunkwater/leetcode
8cc4a07763e71efbaedb523015f0c1eff2927f60
[ "Ruby" ]
null
null
null
medium/python3/c0108_223_rectangle-area/00_leetcode_0108.py
drunkwater/leetcode
8cc4a07763e71efbaedb523015f0c1eff2927f60
[ "Ruby" ]
3
2018-02-09T02:46:48.000Z
2021-02-20T08:32:03.000Z
# DRUNKWATER TEMPLATE(add description and prototypes) # Question Title and Description on leetcode.com # Function Declaration and Function Prototypes on leetcode.com #223. Rectangle Area #Find the total area covered by two rectilinear rectangles in a 2D plane. #Each rectangle is defined by its bottom left corner and top right corner as shown in the figure. #Assume that the total area is never beyond the maximum possible value of int. #Credits: #Special thanks to @mithmatt for adding this problem, creating the above image and all test cases. #class Solution: # def computeArea(self, A, B, C, D, E, F, G, H): # """ # :type A: int # :type B: int # :type C: int # :type D: int # :type E: int # :type F: int # :type G: int # :type H: int # :rtype: int # """ # Time Is Money
32.961538
98
0.655776
128
857
4.390625
0.625
0.087189
0.046263
0
0
0
0
0
0
0
0
0.006319
0.261377
857
26
99
32.961538
0.881517
0.942824
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
489780fa9ccacfe9a097c426e6e4d2cf96e01913
163
py
Python
python-peculiarities/source/MultiplicationComplication.py
noamt/presentations
c5031ae0558d19be920ee1641ba2fc5f4fd88773
[ "Unlicense" ]
null
null
null
python-peculiarities/source/MultiplicationComplication.py
noamt/presentations
c5031ae0558d19be920ee1641ba2fc5f4fd88773
[ "Unlicense" ]
null
null
null
python-peculiarities/source/MultiplicationComplication.py
noamt/presentations
c5031ae0558d19be920ee1641ba2fc5f4fd88773
[ "Unlicense" ]
null
null
null
# https://codegolf.stackexchange.com/a/11480
# Pronic numbers n * (n + 1) for n = 0..9, printed one per line.
multiplication = [n * (n + 1) for n in range(10)]

for value in multiplication:
    print(value)
20.375
44
0.680982
23
163
4.826087
0.695652
0
0
0
0
0
0
0
0
0
0
0.059259
0.171779
163
8
45
20.375
0.762963
0.257669
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
48acd263a6439030b9241f1881827f94f5753592
677
py
Python
pysyte/oss/linux.py
git-wwts/pysyte
625658138cdb5affc1a6a89a9f2c7e3667ee80c2
[ "MIT" ]
1
2021-11-10T15:24:36.000Z
2021-11-10T15:24:36.000Z
pysyte/oss/linux.py
git-wwts/pysyte
625658138cdb5affc1a6a89a9f2c7e3667ee80c2
[ "MIT" ]
12
2020-01-15T00:19:41.000Z
2021-05-11T14:52:04.000Z
pysyte/oss/linux.py
git-wwts/pysyte
625658138cdb5affc1a6a89a9f2c7e3667ee80c2
[ "MIT" ]
2
2015-01-31T11:51:06.000Z
2015-01-31T21:29:19.000Z
"""Linux-specific code""" from pysyte.types import paths def xdg_home(): """path to $XDG_CONFIG_HOME >>> assert xdg_home() == paths.path('~/.config').expand() """ return paths.environ_path("XDG_CONFIG_HOME", "~/.config") def xdg_home_config(filename): """path to that file in $XDG_CONFIG_HOME >>> assert xdg_home_config('fred') == paths.path('~/.config/fred').expand() """ return xdg_home() / filename def xdg_dirs(): """paths in $XDG_CONFIG_DIRS""" return paths.environ_paths("XDG_CONFIG_DIRS") def xdg_homes(): return [xdg_home()] bash_paste = "xclip -selection clipboard" bash_copy = "xclip -selection clipboard -o"
19.911765
79
0.660266
90
677
4.7
0.355556
0.099291
0.092199
0.089835
0.122931
0.122931
0
0
0
0
0
0
0.174298
677
33
80
20.515152
0.756708
0.360414
0
0
0
0
0.238579
0
0
0
0
0
0
1
0.363636
false
0
0.090909
0.090909
0.818182
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
48b3cc7ab2adb8652b3ac164a64a50173d354d2a
4,759
py
Python
PlatformerGame/malmopy/explorers.py
MrMaik/platformer-ml-game
bbcabe3ddea1e3cfddb01b4cd60c8dd1bd79acac
[ "MIT" ]
10
2020-01-05T19:33:33.000Z
2022-02-04T14:56:09.000Z
PlatformerGame/malmopy/explorers.py
MrMaik/platformer-ml-game
bbcabe3ddea1e3cfddb01b4cd60c8dd1bd79acac
[ "MIT" ]
1
2019-12-18T15:16:44.000Z
2019-12-18T15:16:44.000Z
PlatformerGame/malmopy/explorers.py
MrMaik/platformer-ml-game
bbcabe3ddea1e3cfddb01b4cd60c8dd1bd79acac
[ "MIT" ]
6
2019-12-18T14:45:37.000Z
2021-09-13T12:48:28.000Z
# --------------------------------------------------------------------------------------------------
# Copyright (c) 2018 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# --------------------------------------------------------------------------------------------------

"""Module containing explorer classes"""

from numpy import random as np_random

from .summaries import ScalarSummary
from .triggers import each_step
from .abc import Explorer, EpsilonFunction, Visualizable


class ConstantEpsilon(EpsilonFunction):
    """Epsilon function which returns a constant value regardless of step."""

    def __init__(self, epsilon):
        """
        Args:
            epsilon -- the constant epsilon value
        """
        self._epsilon = epsilon

    def epsilon(self, step):
        # Constant schedule: the step is deliberately ignored.
        return self._epsilon


class LinearEpsilon(EpsilonFunction):
    """
    This function uses linear interpolation between epsilon_max and epsilon_min
    to linearly anneal epsilon as a function of the current episode.

    3 cases exist:
    - If 0 <= episode < eps_min_time then epsilon = interpolator(episode)
    - If episode >= eps_min_time then epsilon = eps_min
    - Otherwise epsilon = eps_max
    """

    def __init__(self, eps_max, eps_min, eps_min_time):
        """
        Args:
            eps_max -- the maximum epsilon value
            eps_min -- the minimum epsilon value
            eps_min_time -- the number of steps until epsilon is at its minimum
        """
        assert eps_max > eps_min
        assert eps_min_time > 0
        self._eps_min_time = eps_min_time
        self._eps_min = eps_min
        self._eps_max = eps_max
        # Negative slope of the linear anneal (epsilon decreases with step).
        self._delta = -(eps_max - eps_min) / eps_min_time

    def epsilon(self, step):
        """The epsilon value at a specific step.

        Args:
            step -- the step during training
        """
        # Clamp to the maximum before training starts ...
        if step < 0:
            return self._eps_max
        # ... and to the minimum once the anneal window has elapsed.
        if step > self._eps_min_time:
            return self._eps_min
        return self._delta * step + self._eps_max


class EpsilonGreedyExplorer(Explorer, Visualizable):
    """Explorer which determines whether to explore by sampling from a Bernoulli distribution."""

    def __init__(self, epsilon_function):
        """
        Args:
            epsilon_function -- an instance of EpsilonFunction
        """
        assert isinstance(epsilon_function, EpsilonFunction)
        self._epsilon = epsilon_function
        self._epsilon_summary = ScalarSummary("EpsilonGreedy/Epsilon", each_step())

    @property
    def metrics(self):
        # Single tracked metric: the epsilon value recorded each step.
        return [self._epsilon_summary]

    def is_exploring(self, step):
        # NOTE(review): self._epsilon is an EpsilonFunction instance, yet it is
        # called directly -- this assumes EpsilonFunction defines __call__
        # (presumably delegating to .epsilon); confirm in .abc.
        epsilon = self._epsilon(step)
        self._epsilon_summary.add(epsilon)
        # Bernoulli draw: 1 (explore) with probability epsilon, else 0.
        return np_random.binomial(1, epsilon)

    def explore(self, step, action_space):
        # Exploration action: uniform sample from the action space.
        return action_space.sample()


class ConstantExplorer(EpsilonGreedyExplorer):
    """Explorer which explores with a constant probability."""

    def __init__(self, epsilon):
        """
        Args:
            epsilon -- the probability that the agent will explore
        """
        super(ConstantExplorer, self).__init__(ConstantEpsilon(epsilon))


class LinearEpsilonGreedyExplorer(EpsilonGreedyExplorer):
    """Explorer which uses a LinearEpsilon function."""

    def __init__(self, eps_max, eps_min, eps_min_time):
        """
        Args:
            eps_max -- the maximum epsilon value
            eps_min -- the minimum epsilon value
            eps_min_time -- the number of steps until epsilon is at its minimum
        """
        epsilon_function = LinearEpsilon(eps_max, eps_min, eps_min_time)
        super(LinearEpsilonGreedyExplorer, self).__init__(epsilon_function)
36.328244
100
0.65497
566
4,759
5.314488
0.325088
0.045878
0.039894
0.019947
0.150931
0.150931
0.132314
0.09641
0.09641
0.09641
0
0.002479
0.237025
4,759
130
101
36.607692
0.825943
0.519647
0
0.136364
0
0
0.010547
0.010547
0
0
0
0
0.068182
1
0.227273
false
0
0.090909
0.068182
0.590909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
48d7564242b45a65bf822b185e1203ecbd2093a0
773
py
Python
3 assignment/number_of_digits_unitest.py
nastae/programavimas_python
7e65ad834c5f52e146fb5fcd0408b344545dc30e
[ "Apache-2.0" ]
null
null
null
3 assignment/number_of_digits_unitest.py
nastae/programavimas_python
7e65ad834c5f52e146fb5fcd0408b344545dc30e
[ "Apache-2.0" ]
null
null
null
3 assignment/number_of_digits_unitest.py
nastae/programavimas_python
7e65ad834c5f52e146fb5fcd0408b344545dc30e
[ "Apache-2.0" ]
null
null
null
import unittest


def number_of_digits(s):
    """Return how many characters of ``s`` are decimal digits."""
    return sum(1 for character in s if character.isdigit())


# Unit tests for the function above.  (Original comment, in Lithuanian:
# "Parašykite funkcijai X unittest'us" -- "write unittests for function X".)
class Test(unittest.TestCase):
    def test_only_digits(self):
        self.assertEqual(number_of_digits("123456789"), 9)

    def test_only_letters(self):
        self.assertEqual(number_of_digits("abcdef"), 0)

    def test_digits_between_letters(self):
        self.assertEqual(number_of_digits("asd123asd123asd"), 6)

    def test_letters_between_digits(self):
        self.assertEqual(number_of_digits("123asd123asd123"), 9)

    def test_neither_letter_or_digit(self):
        self.assertEqual(number_of_digits(",./;';'[]`"), 0)


if __name__ == '__main__':
    unittest.main()
24.935484
48
0.65718
102
773
4.647059
0.382353
0.101266
0.177215
0.189873
0.35443
0.35443
0.291139
0.160338
0.160338
0
0
0.048333
0.223803
773
30
49
25.766667
0.741667
0.043984
0
0.190476
0
0
0.085482
0
0
0
0
0
0.238095
1
0.285714
false
0
0.047619
0.047619
0.428571
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
48ebc333c8d0ba26cd1d7f0f9c59510601ab4ec4
1,788
py
Python
cloudkitty/rating/hash/controllers/root.py
wanghuiict/cloudkitty
11ff713042eb0354f497f7051130630c46860735
[ "Apache-2.0" ]
97
2015-10-18T02:53:17.000Z
2022-03-07T05:15:39.000Z
cloudkitty/rating/hash/controllers/root.py
shanafang9/cloudkitty
911c90569ccb09ecf0d7aa11a5a707c8ebda09cf
[ "Apache-2.0" ]
1
2017-11-29T15:39:27.000Z
2017-11-29T15:39:27.000Z
cloudkitty/rating/hash/controllers/root.py
shanafang9/cloudkitty
911c90569ccb09ecf0d7aa11a5a707c8ebda09cf
[ "Apache-2.0" ]
54
2015-10-27T10:55:02.000Z
2022-02-18T08:23:19.000Z
# -*- coding: utf-8 -*- # Copyright 2015 Objectif Libre # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from wsme import types as wtypes import wsmeext.pecan as wsme_pecan from cloudkitty import rating from cloudkitty.rating.hash.controllers import field as field_api from cloudkitty.rating.hash.controllers import group as group_api from cloudkitty.rating.hash.controllers import mapping as mapping_api from cloudkitty.rating.hash.controllers import service as service_api from cloudkitty.rating.hash.controllers import threshold as threshold_api from cloudkitty.rating.hash.datamodels import mapping as mapping_models class HashMapConfigController(rating.RatingRestControllerBase): """Controller exposing all management sub controllers.""" _custom_actions = { 'types': ['GET'] } services = service_api.HashMapServicesController() fields = field_api.HashMapFieldsController() groups = group_api.HashMapGroupsController() mappings = mapping_api.HashMapMappingsController() thresholds = threshold_api.HashMapThresholdsController() @wsme_pecan.wsexpose([wtypes.text]) def get_types(self): """Return the list of every mapping type available. """ return mapping_models.MAP_TYPE.values
38.042553
78
0.758949
226
1,788
5.924779
0.517699
0.073189
0.089619
0.107543
0.182226
0.162061
0.131441
0
0
0
0
0.006069
0.170582
1,788
46
79
38.869565
0.896831
0.39877
0
0
0
0
0.007619
0
0
0
0
0
0
1
0.047619
false
0
0.428571
0
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
48edd7f48e568a644eaeb1b10b708e137aa7c9cf
433
py
Python
src/OTLMOW/OEFModel/Classes/Wilddet.py
davidvlaminck/OTLClassPython
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
[ "MIT" ]
2
2022-02-01T08:58:11.000Z
2022-02-08T13:35:17.000Z
src/OTLMOW/OEFModel/Classes/Wilddet.py
davidvlaminck/OTLMOW
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
[ "MIT" ]
null
null
null
src/OTLMOW/OEFModel/Classes/Wilddet.py
davidvlaminck/OTLMOW
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
[ "MIT" ]
null
null
null
# coding=utf-8 from OTLMOW.OEFModel.EMObject import EMObject # Generated with OEFClassCreator. To modify: extend, do not edit class Wilddet(EMObject): """Een wilddetectiesysteem zal de weggebruikers waarschuwen bij de aanwezigheid van eventueel overstekend wild""" typeURI = 'https://lgc.data.wegenenverkeer.be/ns/installatie#Wilddet' label = 'Wilddetectiesysteem' def __init__(self): super().__init__()
28.866667
117
0.745958
50
433
6.3
0.88
0
0
0
0
0
0
0
0
0
0
0.002747
0.159353
433
14
118
30.928571
0.862637
0.424942
0
0
1
0
0.315353
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.833333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
48f3e0cd5e4cb55eec34f20d3487909f95548f7a
1,418
py
Python
utipy/array/blend.py
LudvigOlsen/utipy
c287f7eed15b3591118bba49ecdfc2b2605f59a0
[ "MIT" ]
null
null
null
utipy/array/blend.py
LudvigOlsen/utipy
c287f7eed15b3591118bba49ecdfc2b2605f59a0
[ "MIT" ]
1
2022-02-16T15:24:33.000Z
2022-02-16T15:24:33.000Z
utipy/array/blend.py
LudvigOlsen/utipy
c287f7eed15b3591118bba49ecdfc2b2605f59a0
[ "MIT" ]
null
null
null
""" @author: ludvigolsen """ from typing import Union import numpy as np import pandas as pd from utipy.utils.check_instance import check_instance from utipy.utils.convert_to_type import convert_to_type def blend(x1: Union[list, np.ndarray, pd.Series], x2: Union[list, np.ndarray, pd.Series], amount: float = 0.5) -> Union[list, np.ndarray, pd.Series]: """ Blend two arrays Parameters ---------- x1 : list, np.ndarray, pd.Series The first array. x2 : list, np.ndarray, pd.Series The second array. amount : float Blend rate. Percentage between 0-1 0: Keep only x1. 1: Keep only x2. 0.1: 10% x2 / 90% x1. A value in-between 0-1 will result in integers becoming floats. Returns ------- list, np.ndarray, pd.Series Blended array with type of the original (x1) Examples -------- Uncomment code to run. # x1 = [1,2,3,4,5] # x2 = [4,5,6,7,8] # blend(x1, x2, amount = 0.5) returns [2.5,3.5,4.5,5.5,6.5] """ # Get instance types (np.ndarray, list, pd.Series) instance_type = check_instance(x1) x1_weighted = np.multiply(x1, (1 - amount)) x2_weighted = np.multiply(x2, amount) blended = x1_weighted + x2_weighted # Convert to original type (np.ndarray, list, pd.Series) return convert_to_type(blended, instance_type)
24.448276
149
0.608604
209
1,418
4.057416
0.344498
0.084906
0.091981
0.106132
0.222877
0.148585
0
0
0
0
0
0.054808
0.266573
1,418
57
150
24.877193
0.760577
0.51481
0
0
0
0
0
0
0
0
0
0
0
1
0.090909
false
0
0.454545
0
0.636364
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
48f9216cd7b0c9c64c3f0cc8145822d20126a1a1
572
py
Python
python/random-videogame-generator.py
iamashiq/Hacktoberfest2021-2
9823996e9e97a25fcf70abc6fd6c55e4b60da568
[ "MIT" ]
6
2021-10-04T07:57:24.000Z
2021-11-15T13:35:21.000Z
python/random-videogame-generator.py
iamashiq/Hacktoberfest2021-2
9823996e9e97a25fcf70abc6fd6c55e4b60da568
[ "MIT" ]
2
2021-10-14T16:55:50.000Z
2021-10-31T12:17:20.000Z
python/random-videogame-generator.py
iamashiq/Hacktoberfest2021-2
9823996e9e97a25fcf70abc6fd6c55e4b60da568
[ "MIT" ]
33
2021-10-03T05:00:58.000Z
2021-11-05T19:49:19.000Z
print("Are you trying to find new videogames to play?") print("let me help you!") print("do you like shooting games, yes or no") shooting=input() if shooting = "yes" print("do you like battle royale games?") br=input() if br="yes" print("you should try out call of duty!") else if br="no" print("you should try overwatch!") else if shooting="no" print("do you like sports games, yes or no") sports=input() if sports="yes" print("try out Fifa or NBA2k!") else if sports="no" print("I know, try out rocket league!")
30.105263
55
0.636364
93
572
3.913978
0.419355
0.057692
0.082418
0.115385
0
0
0
0
0
0
0
0.002288
0.236014
572
18
56
31.777778
0.830664
0
0
0
0
0
0.506993
0
0
0
0
0
0
0
null
null
0
0
null
null
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
2
48fb1aa9e5e10603d8a878537cb85772b452f285
468
py
Python
iot/iot_portal/doctype/iot_homepage/iot_homepage.py
srdgame/symlink_iot
6ec524498cccaf2f49f7264a3b284a8956bd430c
[ "MIT" ]
4
2017-09-26T09:21:19.000Z
2021-12-22T10:26:36.000Z
iot/iot_portal/doctype/iot_homepage/iot_homepage.py
srdgame/symlink_iot
6ec524498cccaf2f49f7264a3b284a8956bd430c
[ "MIT" ]
1
2017-11-21T20:53:10.000Z
2017-12-11T02:17:06.000Z
iot/iot_portal/doctype/iot_homepage/iot_homepage.py
srdgame/symlink_iot
6ec524498cccaf2f49f7264a3b284a8956bd430c
[ "MIT" ]
9
2017-03-17T04:12:22.000Z
2022-03-21T09:33:11.000Z
# -*- coding: utf-8 -*- # Copyright (c) 2017, Dirk Chang and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document from frappe.website.utils import delete_page_cache class IOTHomepage(Document): def validate(self): if not self.description: self.description = frappe._("This is an example website auto-generated from IOT") delete_page_cache('iot_home')
31.2
84
0.782051
65
468
5.461538
0.692308
0.056338
0.084507
0
0
0
0
0
0
0
0
0.012315
0.132479
468
14
85
33.428571
0.862069
0.25
0
0
0
0
0.167147
0
0
0
0
0
0
1
0.111111
false
0
0.444444
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
48fb52f8c130468ec6ba0fdb93a761de09a44b65
368
py
Python
src/garage/envs/env_spec.py
Maltimore/garage
a3f44b37eeddca37d157766a9a72e8772f104bcd
[ "MIT" ]
2
2020-03-15T14:35:15.000Z
2021-02-15T16:38:00.000Z
src/garage/envs/env_spec.py
Maltimore/garage
a3f44b37eeddca37d157766a9a72e8772f104bcd
[ "MIT" ]
null
null
null
src/garage/envs/env_spec.py
Maltimore/garage
a3f44b37eeddca37d157766a9a72e8772f104bcd
[ "MIT" ]
1
2020-02-24T03:04:23.000Z
2020-02-24T03:04:23.000Z
"""EnvSpec class.""" class EnvSpec: """EnvSpec class. Args: observation_space (akro.Space): The observation space of the env. action_space (akro.Space): The action space of the env. """ def __init__(self, observation_space, action_space): self.observation_space = observation_space self.action_space = action_space
23
73
0.668478
44
368
5.318182
0.295455
0.34188
0.119658
0.145299
0
0
0
0
0
0
0
0
0.236413
368
15
74
24.533333
0.83274
0.451087
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
48fc04ddecaf2a0349002da2c688a1f9e69caacb
105
py
Python
exercises/exe41 - 50/exe047.py
thomas-rohde/Classes-Python
f862995510b7aabf68bc14aecf815f597034d8a1
[ "MIT" ]
null
null
null
exercises/exe41 - 50/exe047.py
thomas-rohde/Classes-Python
f862995510b7aabf68bc14aecf815f597034d8a1
[ "MIT" ]
null
null
null
exercises/exe41 - 50/exe047.py
thomas-rohde/Classes-Python
f862995510b7aabf68bc14aecf815f597034d8a1
[ "MIT" ]
null
null
null
# Print the multiplication table (1 through 10) for a user-supplied number.
t = int(input('Digite um nº: '))
for multiplier in range(1, 11):
    print(f'{t} X {multiplier} = {t * multiplier}')
26.25
47
0.495238
19
105
2.736842
0.789474
0.115385
0
0
0
0
0
0
0
0
0
0.074074
0.228571
105
3
48
35
0.567901
0
0
0
0
0
0.247619
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
48fe1f175aa02923066c86fda95e2c0081a49955
98,484
py
Python
pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module CISCO-DIAMETER-BASE-PROTOCOL-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-DIAMETER-BASE-PROTOCOL-MIB # Produced by pysmi-0.3.4 at Wed May 1 11:54:20 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint") ciscoExperiment, = mibBuilder.importSymbols("CISCO-SMI", "ciscoExperiment") InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress") SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString") ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup") Gauge32, ObjectIdentity, Unsigned32, NotificationType, iso, MibIdentifier, Counter64, Counter32, Bits, Integer32, ModuleIdentity, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ObjectIdentity", "Unsigned32", "NotificationType", "iso", "MibIdentifier", "Counter64", "Counter32", "Bits", "Integer32", "ModuleIdentity", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks") RowStatus, StorageType, TruthValue, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "StorageType", "TruthValue", "DisplayString", "TextualConvention") ciscoDiameterBasePMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 10, 133)) 
ciscoDiameterBasePMIB.setRevisions(('2006-08-24 00:01',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setRevisionsDescriptions(('Initial version of this MIB module.',)) if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setLastUpdated('200608240001Z') if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setOrganization('Cisco Systems, Inc.') if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: cs-aaa@cisco.com') if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setDescription("The MIB module for entities implementing the Diameter Base Protocol. Initial Cisco'ized version of the IETF draft draft-zorn-dime-diameter-base-protocol-mib-00.txt.") ciscoDiameterBasePMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 0)) ciscoDiameterBasePMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1)) ciscoDiameterBasePMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2)) cdbpLocalCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1)) cdbpLocalStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2)) cdbpPeerCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3)) cdbpPeerStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4)) cdbpRealmCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5)) cdbpRealmStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6)) cdbpTrapCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7)) ciscoDiaBaseProtEnableProtocolErrorNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 1), TruthValue().clone('false')).setMaxAccess("readwrite") if mibBuilder.loadTexts: ciscoDiaBaseProtEnableProtocolErrorNotif.setStatus('current') if mibBuilder.loadTexts: ciscoDiaBaseProtEnableProtocolErrorNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtProtocolErrorNotif notification.') ciscoDiaBaseProtProtocolErrorNotif = 
NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsProtocolErrors")) if mibBuilder.loadTexts: ciscoDiaBaseProtProtocolErrorNotif.setStatus('current') if mibBuilder.loadTexts: ciscoDiaBaseProtProtocolErrorNotif.setDescription('An ciscoDiaBaseProtProtocolErrorNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnableProtocolErrorNotif is true(1) 2) the value of cdbpPeerStatsProtocolErrors changes. It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.') ciscoDiaBaseProtEnableTransientFailureNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 2), TruthValue().clone('false')).setMaxAccess("readwrite") if mibBuilder.loadTexts: ciscoDiaBaseProtEnableTransientFailureNotif.setStatus('current') if mibBuilder.loadTexts: ciscoDiaBaseProtEnableTransientFailureNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtTransientFailureNotif notification.') ciscoDiaBaseProtTransientFailureNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 2)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransientFailures")) if mibBuilder.loadTexts: ciscoDiaBaseProtTransientFailureNotif.setStatus('current') if mibBuilder.loadTexts: ciscoDiaBaseProtTransientFailureNotif.setDescription('An ciscoDiaBaseProtTransientFailureNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnableTransientFailureNotif is true(1) 2) the value of cdbpPeerStatsTransientFailures changes. 
It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.') ciscoDiaBaseProtEnablePermanentFailureNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 3), TruthValue().clone('false')).setMaxAccess("readwrite") if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePermanentFailureNotif.setStatus('current') if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePermanentFailureNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPermanentFailureNotif notification.') ciscoDiaBaseProtPermanentFailureNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 3)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsPermanentFailures")) if mibBuilder.loadTexts: ciscoDiaBaseProtPermanentFailureNotif.setStatus('current') if mibBuilder.loadTexts: ciscoDiaBaseProtPermanentFailureNotif.setDescription('An ciscoDiaBaseProtPermanentFailureNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePermanentFailureNotif is true(1) 2) the value of cdbpPeerStatsPermanentFailures changes. 
It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
# NOTE(review): pysmi-generated MIB registrations for
# CISCO-DIAMETER-BASE-PROTOCOL-MIB (OID subtree 1.3.6.1.4.1.9.10.133).
# The values below (OIDs, access levels, DESCRIPTION text) are data compiled
# from the MIB module; regenerate from the MIB source rather than hand-edit.
# -- Peer connection down/up notification enable flags and notifications --
ciscoDiaBaseProtEnablePeerConnectionDownNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 4), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionDownNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionDownNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPeerConnectionDownNotif notification.')
ciscoDiaBaseProtPeerConnectionDownNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 4)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"))
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionDownNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionDownNotif.setDescription('An ciscoDiaBaseProtPeerConnectionDownNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePeerConnectionDownNotif is true(1) 2) cdbpPeerStatsState changes to closed(1). 
It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnablePeerConnectionUpNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 5), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionUpNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionUpNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPeerConnectionUpNotif notification.')
ciscoDiaBaseProtPeerConnectionUpNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 5)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"))
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionUpNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionUpNotif.setDescription('An ciscoDiaBaseProtPeerConnectionUpNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePeerConnectionUpNotif is true(1) 2) the value of cdbpPeerStatsState changes to either rOpen(6)or iOpen(7). 
It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
# -- Local Diameter host: identity, IP address table, listen ports, realm --
cdbpLocalId = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 1), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalId.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalId.setDescription("The implementation identification string for the Diameter software in use on the system, for example; 'diameterd'")
cdbpLocalIpAddrTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2), )
if mibBuilder.loadTexts: cdbpLocalIpAddrTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrTable.setDescription("The table listing the Diameter local host's IP Addresses.")
cdbpLocalIpAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddrIndex"))
if mibBuilder.loadTexts: cdbpLocalIpAddrEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrEntry.setDescription('A row entry representing a Diameter local host IP Address.')
cdbpLocalIpAddrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalIpAddrIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrIndex.setDescription('A number uniquely identifying the number of IP Addresses supported by this Diameter host.')
cdbpLocalIpAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalIpAddrType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrType.setDescription('The type of internet address stored in cdbpLocalIpAddress.')
cdbpLocalIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 3), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalIpAddress.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddress.setDescription('The IP-Address of 
the host, which is of the type specified in cdbpLocalIpAddrType.')
cdbpLocalTcpListenPort = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalTcpListenPort.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalTcpListenPort.setDescription("This object represents Diameter TCP 'listen' port.")
cdbpLocalSctpListenPort = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalSctpListenPort.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalSctpListenPort.setDescription("This object represents Diameter SCTP 'listen' port.")
cdbpLocalOriginHost = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 5), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpLocalOriginHost.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalOriginHost.setDescription('This object represents the Local Origin Host.')
cdbpLocalRealm = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalRealm.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalRealm.setDescription('This object represents the Local Realm Name.')
# -- Cisco redundancy status scalars --
cdbpRedundancyEnabled = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 7), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpRedundancyEnabled.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyEnabled.setDescription('This parameter indicates if cisco redundancy has been enabled, it is enabled if set to true and disabled if set to false.')
cdbpRedundancyInfraState = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("rfUnknown", 0), ("rfDisabled", 1), ("rfInitialization", 2), ("rfNegotiation", 3), ("rfStandbyCold", 4), ("rfStandbyConfig", 5), ("rfStandbyFileSys", 6), ("rfStandbyBulk", 7), ("rfStandbyHot", 8), ("rfActiveFast", 9), ("rfActiveDrain", 10), ("rfActivePreconfig", 11), ("rfActivePostconfig", 12), ("rfActive", 13), ("rfActiveExtraload", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRedundancyInfraState.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyInfraState.setDescription("This parameter indicates the current state of cisco redundancy infrastructure state. rfUnknown(0) - unknown state rfDisabled(1) - RF is not functioning at this time rfInitialization(2) - co-ordinating init with platform rfNegotiation(3) - initial negotiation with peer to determine active-standby rfStandbyCold(4) - peer is active, we're cold rfStandbyConfig(5) - sync config from active to standby rfStandbyFileSys(6) - sync file sys from active to standby rfStandbyBulk(7) - clients bulk sync from active to standby rfStandbyHot(8) - standby ready-n-able to be active rfActiveFast(9) - immediate notification of standby going active rfActiveDrain(10) - drain queued messages from peer rfActivePreconfig(11) - active and before config rfActivePostconfig(12) - active and post config rfActive(13) - actively processing new calls rfActiveExtraload(14) - actively processing new calls extra resources other Processing is failed and I have extra load.")
cdbpRedundancyLastSwitchover = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRedundancyLastSwitchover.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyLastSwitchover.setDescription('This object represents the Last Switchover Time.')
# -- cdbpLocalApplTable: Diameter applications supported by this server --
cdbpLocalApplTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10), )
if mibBuilder.loadTexts: cdbpLocalApplTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplTable.setDescription('The table listing the Diameter applications supported by 
this server.')
cdbpLocalApplEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplIndex"))
if mibBuilder.loadTexts: cdbpLocalApplEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplEntry.setDescription('A row entry representing a Diameter application on this server.')
cdbpLocalApplIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalApplIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplIndex.setDescription('A number uniquely identifying a supported Diameter application. Upon reload, cdbpLocalApplIndex values may be changed.')
cdbpLocalApplStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 2), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setDescription('The storage type for this conceptual row. None of the columnar objects is writable when the conceptual row is permanent.')
cdbpLocalApplRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalApplRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdsgStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpLocalApplIndex has been set. 
cdbpLocalApplIndex may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpLocalApplStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpLocalApplStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpLocalApplStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
# -- cdbpLocalVendorTable: vendor IDs supported by local Diameter --
cdbpLocalVendorTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11), )
if mibBuilder.loadTexts: cdbpLocalVendorTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorTable.setDescription('The table listing the vendor IDs supported by local Diameter.')
cdbpLocalVendorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorIndex"))
if mibBuilder.loadTexts: cdbpLocalVendorEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorEntry.setDescription('A row entry representing a vendor ID supported by local Diameter.')
cdbpLocalVendorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalVendorIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorIndex.setDescription('A number uniquely identifying the vendor ID supported by local Diameter. 
Upon reload, cdbpLocalVendorIndex values may be changed.')
cdbpLocalVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 9, 10415, 12645))).clone(namedValues=NamedValues(("diameterVendorIetf", 0), ("diameterVendorCisco", 9), ("diameterVendor3gpp", 10415), ("diameterVendorVodafone", 12645))).clone('diameterVendorIetf')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorId.setDescription('The active vendor ID used for peer connections. diameterVendorIetf(0) - Diameter vendor id ietf diameterVendorCisco(9) - Diameter vendor id cisco diameterVendor3gpp(10415) - Diameter vendor id 3gpp diameterVendorVodafone(12645) - Diameter vendor id vodafone.')
cdbpLocalVendorStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpLocalVendorRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpLocalVendorRowStatus column is 'notReady'. 
In particular, a newly created row cannot be made active until the corresponding cdbpLocalVendorId has been set. cdbpLocalVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpLocalVendorRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpLocalVendorRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpLocalVendorRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
# -- cdbpAppAdvToPeerTable: applications advertised by this host to each peer --
cdbpAppAdvToPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12), )
if mibBuilder.loadTexts: cdbpAppAdvToPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerTable.setDescription('The table listing the applications advertised by this host to each peer and the types of service supported: accounting, authentication or both.')
cdbpAppAdvToPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerVendorId"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerIndex"))
if mibBuilder.loadTexts: cdbpAppAdvToPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
cdbpAppAdvToPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvToPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerVendorId.setDescription('The IANA Enterprise Code value assigned to the vendor of the Diameter device.')
cdbpAppAdvToPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvToPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerIndex.setDescription('A number uniquely identifying the Diameter applications advertised as supported by this host to each peer. Upon reload, cdbpAppAdvToPeerIndex values may be changed.')
cdbpAppAdvToPeerServices = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpAppAdvToPeerServices.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerServices.setDescription('The type of services supported for each application, accounting, authentication or both. acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.')
cdbpAppAdvToPeerStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 4), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpAppAdvToPeerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpAppAdvToPeerRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpAppAdvToPeerRowStatus column is 'notReady'. 
In particular, a newly created row cannot be made active until the corresponding cdbpAppAdvToPeerVendorId has been set. cdbpAppAdvToPeerVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpAppAdvToPeerRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpAppAdvToPeerRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpAppAdvToPeerRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
# -- Local statistics scalars and config-reset control --
cdbpLocalStatsTotalPacketsIn = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 1), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsIn.setDescription('The total number of packets received by Diameter Base Protocol.')
cdbpLocalStatsTotalPacketsOut = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 2), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsOut.setDescription('The total number of packets transmitted by Diameter Base Protocol.')
cdbpLocalStatsTotalUpTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 3), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalUpTime.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalUpTime.setDescription('This object represents the total time the Diameter server has been up until now.')
cdbpLocalResetTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 4), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalResetTime.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalResetTime.setDescription("If the server keeps persistent state (e.g., a process) and supports a 'reset' operation (e.g., can be told to re-read 
configuration files), this value will be the time elapsed (in hundredths of a second) since the server was 'reset'. For software that does not have persistence or does not support a 'reset' operation, this value will be zero.")
cdbpLocalConfigReset = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("reset", 2), ("initializing", 3), ("running", 4))).clone('other')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpLocalConfigReset.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalConfigReset.setDescription('Status/action object to reinitialize any persistent server state. When set to reset(2), any persistent server state (such as a process) is reinitialized as if the server had just been started. This value will never be returned by a read operation. When read, one of the following values will be returned: other(1) - server in some unknown state. reset(2) - command to reinitialize server state. initializing(3) - server (re)initializing. 
running(4) - server currently running.')
# -- cdbpPeerTable: discovered or configured Diameter peers --
cdbpPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1), )
if mibBuilder.loadTexts: cdbpPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerTable.setDescription('The table listing information regarding the discovered or configured Diameter peer servers.')
cdbpPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"))
if mibBuilder.loadTexts: cdbpPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
cdbpPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIndex.setDescription('A number uniquely identifying each Diameter peer with which the host server communicates. Upon reload, cdbpPeerIndex values may be changed.')
cdbpPeerId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerId.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerId.setDescription('The server identifier for the Diameter peer. It must be unique and non-empty.')
cdbpPeerPortConnect = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerPortConnect.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerPortConnect.setDescription('The connection port this server used to connect to the Diameter peer. 
If there is no active connection, this value will be zero(0).')
cdbpPeerPortListen = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(3868)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerPortListen.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerPortListen.setDescription('The port the server is listening on.')
cdbpPeerProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("tcp", 1), ("sctp", 2))).clone('tcp')).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerProtocol.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerProtocol.setDescription('The transport protocol (tcp/sctp) the Diameter peer is using. tcp(1) - Transmission Control Protocol sctp(2) - Stream Control Transmission Protocol.')
cdbpPeerSecurity = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("tls", 2), ("ipsec", 3))).clone('other')).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerSecurity.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerSecurity.setDescription('The security the Diameter peer is using. other(1) - Unknown Security Protocol. tls(2) - Transport Layer Security Protocol. ipsec(3) - Internet Protocol Security.')
cdbpPeerFirmwareRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerFirmwareRevision.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerFirmwareRevision.setDescription('Firmware revision of peer. 
If no firmware revision, the revision of the Diameter software module may be reported instead.')
cdbpPeerStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 8), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpPeerStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStorageType.setDescription('The storage type for this conceptual row. Only cdbpPeerPortListen object is writable when the conceptual row is permanent.')
cdbpPeerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpPeerRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpPeerId has been set. cdbpPeerId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpPeerRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpPeerRowStatus equal to active(1) remain in the table until destroyed. 
Entries in this table with cdbpPeerRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
# -- cdbpPeerIpAddrTable: IP addresses of each peer --
cdbpPeerIpAddrTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2), )
if mibBuilder.loadTexts: cdbpPeerIpAddrTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddrTable.setDescription('The table listing the Diameter server IP Addresses.')
cdbpPeerIpAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddressIndex"))
if mibBuilder.loadTexts: cdbpPeerIpAddrEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddrEntry.setDescription('A row entry representing peer Diameter server IP Addresses.')
cdbpPeerIpAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerIpAddressIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddressIndex.setDescription('A number uniquely identifying the number of IP Addresses supported by all Diameter peers.')
cdbpPeerIpAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerIpAddressType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddressType.setDescription('The type of address stored in diameterPeerIpAddress.')
cdbpPeerIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpPeerIpAddress.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddress.setDescription('The active IP Address(es) used for connections.')
# -- cdbpAppAdvFromPeerTable: applications advertised by each peer to this host --
cdbpAppAdvFromPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3), )
if mibBuilder.loadTexts: cdbpAppAdvFromPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerTable.setDescription('The table listing the applications advertised by each peer to this host and the types of service supported: accounting, authentication or both.')
cdbpAppAdvFromPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerVendorId"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerIndex"))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
cdbpAppAdvFromPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerVendorId.setDescription('The IANA Enterprise Code value assigned to the vendor of the Diameter device.')
cdbpAppAdvFromPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerIndex.setDescription('A number uniquely identifying the applications advertised as supported from each Diameter peer.')
cdbpAppAdvFromPeerType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpAppAdvFromPeerType.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerType.setDescription('The type of services supported for each application, accounting, authentication or both. 
acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.')
# -- cdbpPeerVendorTable: vendor IDs supported by each peer --
cdbpPeerVendorTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4), )
if mibBuilder.loadTexts: cdbpPeerVendorTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorTable.setDescription('The table listing the Vendor IDs supported by the peer.')
cdbpPeerVendorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorIndex"))
if mibBuilder.loadTexts: cdbpPeerVendorEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorEntry.setDescription('A row entry representing a Vendor ID supported by the peer.')
cdbpPeerVendorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerVendorIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorIndex.setDescription('A number uniquely identifying the Vendor ID supported by the peer. Upon reload, cdbpPeerVendorIndex values may be changed.')
cdbpPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 9, 10415, 12645))).clone(namedValues=NamedValues(("diameterVendorIetf", 0), ("diameterVendorCisco", 9), ("diameterVendor3gpp", 10415), ("diameterVendorVodafone", 12645))).clone('diameterVendorIetf')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorId.setDescription('The active vendor ID used for peer connections. 
diameterVendorIetf(0) - Diameter vendor id ietf diameterVendorCisco(9) - Diameter vendor id cisco diameterVendor3gpp(10415) - Diameter vendor id 3gpp diameterVendorVodafone(12645) - Diameter vendor id vodafone.')
cdbpPeerVendorStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpPeerVendorRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpPeerVendorRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpPeerVendorId has been set. Also, a newly created row cannot be made active until the corresponding 'cdbpPeerIndex' has been set. cdbpPeerVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpPeerVendorRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpPeerVendorRowStatus equal to active(1) remain in the table until destroyed. 
Entries in this table with cdbpPeerVendorRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
# -- cdbpPeerStatsTable: per-peer connection state and statistics --
cdbpPeerStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1), )
if mibBuilder.loadTexts: cdbpPeerStatsTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTable.setDescription('The table listing the Diameter peer statistics.')
cdbpPeerStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"))
if mibBuilder.loadTexts: cdbpPeerStatsEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsEntry.setDescription('A row entry representing a Diameter peer.')
cdbpPeerStatsState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("closed", 1), ("waitConnAck", 2), ("waitICEA", 3), ("elect", 4), ("waitReturns", 5), ("rOpen", 6), ("iOpen", 7), ("closing", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsState.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsState.setDescription('Connection state in the Peer State Machine of the peer with which this Diameter server is communicating. closed(1) - Connection closed with this peer. waitConnAck(2) - Waiting for an acknowledgment from this peer. waitICEA(3) - Waiting for a Capabilities-Exchange- Answer from this peer. elect(4) - When the peer and the server are both trying to bring up a connection with each other at the same time. An election process begins which determines which socket remains open. waitReturns(5) - Waiting for election returns. r-open(6) - Responder transport connection is used for communication. i-open(7) - Initiator transport connection is used for communication. 
closing(8) - Actively closing and doing cleanup.')
cdbpPeerStatsStateDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 2), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsStateDuration.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsStateDuration.setDescription('This object represents the Peer state duration.')
cdbpPeerStatsLastDiscCause = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("rebooting", 1), ("busy", 2), ("doNotWantToTalk", 3), ("election", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsLastDiscCause.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsLastDiscCause.setDescription("The last cause for a peers disconnection. rebooting(1) - A scheduled reboot is imminent. busy(2) - The peer's internal resources are constrained, and it has determined that the transport connection needs to be shutdown. doNotWantToTalk(3) - The peer has determined that it does not see a need for the transport connection to exist, since it does not expect any messages to be exchanged in the foreseeable future. electionLost(4) - The peer has determined that it has lost the election process and has therefore disconnected the transport connection.")
cdbpPeerStatsWhoInitDisconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("host", 1), ("peer", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsWhoInitDisconnect.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsWhoInitDisconnect.setDescription('Did the host or peer initiate the disconnect? host(1) - If this server initiated the disconnect. 
peer(2) - If the peer with which this server was connected initiated the disconnect.')
cdbpPeerStatsDWCurrentStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("okay", 1), ("suspect", 2), ("down", 3), ("reopen", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWCurrentStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWCurrentStatus.setDescription('This object indicates the connection status. okay(1) - Indicates the connection is presumed working. suspect(2) - Indicates the connection is possibly congested or down. down(3) - The peer is no longer reachable, causing the transport connection to be shutdown. reopen(4) - Three watchdog messages are exchanged with accepted round trip times, and the connection to the peer is considered stabilized.')
cdbpPeerStatsTimeoutConnAtmpts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 6), Counter32()).setUnits('attempts').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTimeoutConnAtmpts.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTimeoutConnAtmpts.setDescription('If there is no transport connection with a peer, this is the number of times the server attempts to connect to that peer. 
This is reset on disconnection.') cdbpPeerStatsASRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 7), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsASRsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsASRsIn.setDescription('Abort-Session-Request messages received from the peer.') cdbpPeerStatsASRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 8), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsASRsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsASRsOut.setDescription('Abort-Session-Request messages sent to the peer.') cdbpPeerStatsASAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 9), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsASAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsASAsIn.setDescription('Number of Abort-Session-Answer messages received from the peer.') cdbpPeerStatsASAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 10), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsASAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsASAsOut.setDescription('Number of Abort-Session-Answer messages sent to the peer.') cdbpPeerStatsACRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 11), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsACRsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsACRsIn.setDescription('Number of Accounting-Request messages received from the peer.') cdbpPeerStatsACRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 12), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsACRsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsACRsOut.setDescription('Number of Accounting-Request 
messages sent to the peer.') cdbpPeerStatsACAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 13), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsACAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsACAsIn.setDescription('Number of Accounting-Answer messages received from the peer.') cdbpPeerStatsACAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 14), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsACAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsACAsOut.setDescription('Number of Accounting-Answer messages sent to the peer.') cdbpPeerStatsCERsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 15), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsCERsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsCERsIn.setDescription('Number of Capabilities-Exchange-Request messages received from the peer.') cdbpPeerStatsCERsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 16), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsCERsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsCERsOut.setDescription('Number of Capabilities-Exchange-Request messages sent to the peer.') cdbpPeerStatsCEAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 17), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsCEAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsCEAsIn.setDescription('Number of Capabilities-Exchange-Answer messages received from the peer.') cdbpPeerStatsCEAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 18), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsCEAsOut.setStatus('current') if mibBuilder.loadTexts: 
cdbpPeerStatsCEAsOut.setDescription('Number of Capabilities-Exchange-Answer messages sent to the peer.') cdbpPeerStatsDWRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 19), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDWRsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsDWRsIn.setDescription('Number of Device-Watchdog-Request messages received from the peer.') cdbpPeerStatsDWRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 20), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDWRsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsDWRsOut.setDescription('Number of Device-Watchdog-Request messages sent to the peer.') cdbpPeerStatsDWAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 21), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDWAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsDWAsIn.setDescription('Number of Device-Watchdog-Answer messages received from the peer.') cdbpPeerStatsDWAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 22), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDWAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsDWAsOut.setDescription('Number of Device-Watchdog-Answer messages sent to the peer.') cdbpPeerStatsDPRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 23), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDPRsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsDPRsIn.setDescription('Number of Disconnect-Peer-Request messages received.') cdbpPeerStatsDPRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 24), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDPRsOut.setStatus('current') if 
mibBuilder.loadTexts: cdbpPeerStatsDPRsOut.setDescription('Number of Disconnect-Peer-Request messages sent.') cdbpPeerStatsDPAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 25), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDPAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsDPAsIn.setDescription('Number of Disconnect-Peer-Answer messages received.') cdbpPeerStatsDPAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 26), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDPAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsDPAsOut.setDescription('Number of Disconnect-Peer-Answer messages sent.') cdbpPeerStatsRARsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 27), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsRARsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsRARsIn.setDescription('Number of Re-Auth-Request messages received.') cdbpPeerStatsRARsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 28), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsRARsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsRARsOut.setDescription('Number of Re-Auth-Request messages sent.') cdbpPeerStatsRAAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 29), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsRAAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsRAAsIn.setDescription('Number of Re-Auth-Answer messages received.') cdbpPeerStatsRAAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 30), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsRAAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsRAAsOut.setDescription('Number of 
Re-Auth-Answer messages sent.') cdbpPeerStatsSTRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 31), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsSTRsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsSTRsIn.setDescription('Number of Session-Termination-Request messages received from the peer.') cdbpPeerStatsSTRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 32), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsSTRsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsSTRsOut.setDescription('Number of Session-Termination-Request messages sent to the peer.') cdbpPeerStatsSTAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 33), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsSTAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsSTAsIn.setDescription('Number of Session-Termination-Answer messages received from the peer.') cdbpPeerStatsSTAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 34), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsSTAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsSTAsOut.setDescription('Number of Session-Termination-Answer messages sent to the peer.') cdbpPeerStatsDWReqTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 35), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsDWReqTimer.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsDWReqTimer.setDescription('Device-Watchdog Request Timer, which is the interval between packets sent to peers.') cdbpPeerStatsRedirectEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 36), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsRedirectEvents.setStatus('current') if mibBuilder.loadTexts: 
cdbpPeerStatsRedirectEvents.setDescription('Redirect Event count, which is the number of redirects sent from a peer.') cdbpPeerStatsAccDupRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 37), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsAccDupRequests.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsAccDupRequests.setDescription('The number of duplicate Diameter Accounting-Request packets received.') cdbpPeerStatsMalformedReqsts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 38), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsMalformedReqsts.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsMalformedReqsts.setDescription('The number of malformed Diameter packets received.') cdbpPeerStatsAccsNotRecorded = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 39), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsAccsNotRecorded.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsAccsNotRecorded.setDescription('The number of Diameter Accounting-Request packets which were received and responded to but not recorded.') cdbpPeerStatsAccRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 40), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsAccRetrans.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsAccRetrans.setDescription('The number of Diameter Accounting-Request packets retransmitted to this Diameter server.') cdbpPeerStatsTotalRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 41), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsTotalRetrans.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsTotalRetrans.setDescription('The number of Diameter packets retransmitted to this Diameter server, not to include Diameter Accounting-Request packets retransmitted.') cdbpPeerStatsAccPendReqstsOut = MibTableColumn((1, 3, 6, 1, 
4, 1, 9, 10, 133, 1, 4, 1, 1, 42), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsAccPendReqstsOut.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsAccPendReqstsOut.setDescription('The number of Diameter Accounting-Request packets sent to this peer that have not yet timed out or received a response. This variable is incremented when an Accounting-Request is sent to this server and decremented due to receipt of an Accounting-Response, a timeout or a retransmission.') cdbpPeerStatsAccReqstsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 43), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsAccReqstsDropped.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsAccReqstsDropped.setDescription('The number of Accounting-Requests to this server that have been dropped.') cdbpPeerStatsHByHDropMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 44), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsHByHDropMessages.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsHByHDropMessages.setDescription('An answer message that is received with an unknown hop-by-hop identifier. Does not include accounting requests dropped.') cdbpPeerStatsEToEDupMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 45), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsEToEDupMessages.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsEToEDupMessages.setDescription('Duplicate answer messages that are to be locally consumed. 
Does not include duplicate accounting requests received.') cdbpPeerStatsUnknownTypes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 46), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsUnknownTypes.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsUnknownTypes.setDescription('The number of Diameter packets of unknown type which were received.') cdbpPeerStatsProtocolErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 47), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsProtocolErrors.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsProtocolErrors.setDescription('This object represents the Number of protocol errors returned to peer, but not including redirects.') cdbpPeerStatsTransientFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 48), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsTransientFailures.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsTransientFailures.setDescription('This object represents the transient failure count.') cdbpPeerStatsPermanentFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 49), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsPermanentFailures.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsPermanentFailures.setDescription('This object represents the Number of permanent failures returned to peer.') cdbpPeerStatsTransportDown = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 50), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpPeerStatsTransportDown.setStatus('current') if mibBuilder.loadTexts: cdbpPeerStatsTransportDown.setDescription('This object represents the Number of unexpected transport failures.') cdbpRealmKnownPeersTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1), ) if mibBuilder.loadTexts: cdbpRealmKnownPeersTable.setStatus('current') if mibBuilder.loadTexts: 
cdbpRealmKnownPeersTable.setDescription('The table listing the Diameter realms and known peers.') cdbpRealmKnownPeersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeersIndex")) if mibBuilder.loadTexts: cdbpRealmKnownPeersEntry.setStatus('current') if mibBuilder.loadTexts: cdbpRealmKnownPeersEntry.setDescription('A row entry representing a Diameter realm and known peers.') cdbpRealmKnownPeersIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))) if mibBuilder.loadTexts: cdbpRealmKnownPeersIndex.setStatus('current') if mibBuilder.loadTexts: cdbpRealmKnownPeersIndex.setDescription('A number uniquely identifying a peer known to this realm. Upon reload, cdbpRealmKnownPeersIndex values may be changed.') cdbpRealmKnownPeers = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmKnownPeers.setStatus('current') if mibBuilder.loadTexts: cdbpRealmKnownPeers.setDescription('The index of the peer this realm knows about. This is an ordered list, where the ordering signifies the order in which the peers are tried. Same as the cdbpPeerIndex') cdbpRealmKnownPeersChosen = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("roundRobin", 1), ("loadBalance", 2), ("firstPreferred", 3), ("mostRecentFirst", 4), ("other", 5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmKnownPeersChosen.setStatus('current') if mibBuilder.loadTexts: cdbpRealmKnownPeersChosen.setDescription('How the realm chooses which peer to send packets to. 
roundRobin(1) - The peer used for each transaction is selected based on the order in which peers are configured. loadBalance(2) - The peer used for each transaction is based on the load metric (maybe implementation dependent) of all peers defined for the realm, with the least loaded server selected first. firstPreferred(3) - The first defined server is always used for transactions unless failover occurs. mostRecentFirst(4) - The most recently used server is used first for each transaction.') cdbpRealmMessageRouteTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1), ) if mibBuilder.loadTexts: cdbpRealmMessageRouteTable.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteTable.setDescription('The table listing the Diameter realm-based message route information.') cdbpRealmMessageRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteIndex")) if mibBuilder.loadTexts: cdbpRealmMessageRouteEntry.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteEntry.setDescription('A row entry representing a Diameter realm based message route server.') cdbpRealmMessageRouteIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))) if mibBuilder.loadTexts: cdbpRealmMessageRouteIndex.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteIndex.setDescription('A number uniquely identifying each realm.') cdbpRealmMessageRouteRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 2), SnmpAdminString()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteRealm.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteRealm.setDescription('This object represents the realm name') cdbpRealmMessageRouteApp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 
4294967295))).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteApp.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteApp.setDescription('Application id used to route packets to this realm.') cdbpRealmMessageRouteType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteType.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteType.setDescription('The types of service supported for each realm application: accounting, authentication or both. acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.') cdbpRealmMessageRouteAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("local", 1), ("relay", 2), ("proxy", 3), ("redirect", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteAction.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteAction.setDescription('The action is used to identify how a message should be treated based on the realm, application and type. local(1) - Diameter messages that resolve to a route entry with the Local Action set to Local can be satisfied locally, and do not need to be routed to another server. relay(2) - All Diameter messages that fall within this category MUST be routed to a next-hop server, without modifying any non-routing AVPs. proxy(3) - All Diameter messages that fall within this category MUST be routed to a next-hop server. 
redirect(4) - Diameter messages that fall within this category MUST have the identity of the home Diameter server(s) appended, and returned to the sender of the message.') cdbpRealmMessageRouteACRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 6), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsIn.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsIn.setDescription('Number of Accounting-Request messages received from the realm.') cdbpRealmMessageRouteACRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 7), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsOut.setDescription('Number of Accounting-Request messages sent to the realm.') cdbpRealmMessageRouteACAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 8), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsIn.setDescription('Number of Accounting-Answer messages received from the realm.') cdbpRealmMessageRouteACAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 9), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsOut.setDescription('Number of Accounting-Answer messages sent to the realm.') cdbpRealmMessageRouteRARsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 10), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsIn.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsIn.setDescription('Number of Re-Auth-Request messages received from the realm.') cdbpRealmMessageRouteRARsOut = 
MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 11), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsOut.setDescription('Number of Re-Auth-Request messages sent to the realm.') cdbpRealmMessageRouteRAAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 12), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsIn.setDescription('Number of Re-Auth-Answer messages received from the realm.') cdbpRealmMessageRouteRAAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 13), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsOut.setDescription('Number of Re-Auth-Answer messages sent to the realm.') cdbpRealmMessageRouteSTRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 14), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsIn.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsIn.setDescription('Number of Session-Termination-Request messages received from the realm.') cdbpRealmMessageRouteSTRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 15), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsOut.setDescription('Number of Session-Termination-Request messages sent to the realm.') cdbpRealmMessageRouteSTAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 16), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsIn.setStatus('current') if 
mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsIn.setDescription('Number of Session-Termination-Answer messages received from the realm.') cdbpRealmMessageRouteSTAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 17), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsOut.setDescription('Number of Session-Termination-Answer messages sent to the realm.') cdbpRealmMessageRouteASRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 18), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsIn.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsIn.setDescription('Number of Abort-Session-Request messages received from the realm.') cdbpRealmMessageRouteASRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 19), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsOut.setDescription('Number of Abort-Session-Request messages sent to the realm.') cdbpRealmMessageRouteASAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 20), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsIn.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsIn.setDescription('Number of Abort-Session-Answer messages received from the realm.') cdbpRealmMessageRouteASAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 21), Counter32()).setUnits('messages').setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsOut.setDescription('Number of Abort-Session-Answer messages sent to the realm.') cdbpRealmMessageRouteAccRetrans = 
MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 22), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteAccRetrans.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteAccRetrans.setDescription('The number of Diameter accounting packets retransmitted to this realm.') cdbpRealmMessageRouteAccDupReqsts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 23), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteAccDupReqsts.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteAccDupReqsts.setDescription('The number of duplicate Diameter accounting packets sent to this realm.') cdbpRealmMessageRoutePendReqstsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 24), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRoutePendReqstsOut.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRoutePendReqstsOut.setDescription('The number of Diameter Accounting-Request packets sent to this peer that have not yet timed out or received a response. 
This variable is incremented when an Accounting-Request is sent to this server and decremented due to receipt of an Accounting-Response, a timeout or a retransmission.') cdbpRealmMessageRouteReqstsDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 25), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: cdbpRealmMessageRouteReqstsDrop.setStatus('current') if mibBuilder.loadTexts: cdbpRealmMessageRouteReqstsDrop.setDescription('The number of requests dropped by this realm.') ciscoDiameterBasePMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 1)) ciscoDiameterBasePMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2)) ciscoDiameterBasePMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 1, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerStatsGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBNotificationsGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBTrapCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalStatsSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerStatsSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBRealmCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBRealmStatsSkippedGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBCompliance = ciscoDiameterBasePMIBCompliance.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBCompliance.setDescription('The compliance statement for Diameter Base Protocol entities.') ciscoDiameterBasePMIBLocalCfgGroup = ObjectGroup((1, 3, 
6, 1, 4, 1, 9, 10, 133, 2, 2, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalRealm"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyEnabled"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyInfraState"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyLastSwitchover"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalOriginHost"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorRowStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBLocalCfgGroup = ciscoDiameterBasePMIBLocalCfgGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalCfgGroup.setDescription('A collection of objects providing configuration common to the server.') ciscoDiameterBasePMIBPeerCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 2)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerPortConnect"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerPortListen"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerProtocol"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerSecurity"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerFirmwareRevision"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerRowStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddressType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddress"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorRowStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBPeerCfgGroup = ciscoDiameterBasePMIBPeerCfgGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerCfgGroup.setDescription('A collection of objects providing 
configuration of the Diameter peers.') ciscoDiameterBasePMIBPeerStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 3)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsState"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsStateDuration"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsLastDiscCause"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsWhoInitDisconnect"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWCurrentStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTimeoutConnAtmpts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCERsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCERsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCEAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCEAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRARsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRARsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRAAsIn"), 
("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRAAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWReqTimer"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRedirectEvents"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccDupRequests"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsMalformedReqsts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccsNotRecorded"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTotalRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccPendReqstsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccReqstsDropped"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsHByHDropMessages"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsEToEDupMessages"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsUnknownTypes"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsProtocolErrors"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransientFailures"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsPermanentFailures"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransportDown")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBPeerStatsGroup = ciscoDiameterBasePMIBPeerStatsGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerStatsGroup.setDescription('A collection of objects providing statistics of the Diameter peers.') ciscoDiameterBasePMIBNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 4)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtProtocolErrorNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtTransientFailureNotif"), 
("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPermanentFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPeerConnectionDownNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPeerConnectionUpNotif")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBNotificationsGroup = ciscoDiameterBasePMIBNotificationsGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBNotificationsGroup.setDescription('The set of notifications which an agent is required to implement.') ciscoDiameterBasePMIBTrapCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 5)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnableProtocolErrorNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnableTransientFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePermanentFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePeerConnectionDownNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePeerConnectionUpNotif")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBTrapCfgGroup = ciscoDiameterBasePMIBTrapCfgGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBTrapCfgGroup.setDescription('A collection of objects providing configuration for base protocol notifications.') ciscoDiameterBasePMIBLocalCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 6)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddrType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddress"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalTcpListenPort"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalSctpListenPort"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalUpTime"), 
("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalResetTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalConfigReset"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplRowStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerServices"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerRowStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBLocalCfgSkippedGroup = ciscoDiameterBasePMIBLocalCfgSkippedGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalCfgSkippedGroup.setDescription('A collection of objects providing configuration common to the server.') ciscoDiameterBasePMIBLocalStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 7)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalUpTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalResetTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalConfigReset")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBLocalStatsSkippedGroup = ciscoDiameterBasePMIBLocalStatsSkippedGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalStatsSkippedGroup.setDescription('A collection of objects providing statistics common to the server.') ciscoDiameterBasePMIBPeerCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 8)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerType")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBPeerCfgSkippedGroup = ciscoDiameterBasePMIBPeerCfgSkippedGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerCfgSkippedGroup.setDescription('A collection of objects providing 
configuration for Diameter peers.') ciscoDiameterBasePMIBPeerStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 9)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWCurrentStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWReqTimer"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRedirectEvents"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccDupRequests"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsEToEDupMessages")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBPeerStatsSkippedGroup = ciscoDiameterBasePMIBPeerStatsSkippedGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerStatsSkippedGroup.setDescription('A collection of objects providing statistics of Diameter peers.') ciscoDiameterBasePMIBRealmCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 10)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeers"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeersChosen")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBRealmCfgSkippedGroup = ciscoDiameterBasePMIBRealmCfgSkippedGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBRealmCfgSkippedGroup.setDescription('A collection of objects providing configuration for realm message routing.') ciscoDiameterBasePMIBRealmStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 11)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRealm"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteApp"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAction"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", 
"cdbpRealmMessageRouteACAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRARsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRARsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRAAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRAAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAccRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAccDupReqsts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRoutePendReqstsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteReqstsDrop")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ciscoDiameterBasePMIBRealmStatsSkippedGroup = ciscoDiameterBasePMIBRealmStatsSkippedGroup.setStatus('current') if mibBuilder.loadTexts: ciscoDiameterBasePMIBRealmStatsSkippedGroup.setDescription('A collection of objects providing statistics of realm message routing.') mibBuilder.exportSymbols("CISCO-DIAMETER-BASE-PROTOCOL-MIB", cdbpRealmMessageRouteACRsIn=cdbpRealmMessageRouteACRsIn, cdbpRealmStats=cdbpRealmStats, ciscoDiameterBasePMIBCompliance=ciscoDiameterBasePMIBCompliance, cdbpPeerStatsSTAsOut=cdbpPeerStatsSTAsOut, cdbpPeerProtocol=cdbpPeerProtocol, cdbpPeerTable=cdbpPeerTable, ciscoDiaBaseProtPeerConnectionDownNotif=ciscoDiaBaseProtPeerConnectionDownNotif, cdbpLocalVendorIndex=cdbpLocalVendorIndex, cdbpPeerStatsDWReqTimer=cdbpPeerStatsDWReqTimer, 
cdbpPeerStatsACAsIn=cdbpPeerStatsACAsIn, cdbpPeerStatsDWRsOut=cdbpPeerStatsDWRsOut, ciscoDiaBaseProtEnablePeerConnectionDownNotif=ciscoDiaBaseProtEnablePeerConnectionDownNotif, cdbpPeerStatsDPAsIn=cdbpPeerStatsDPAsIn, cdbpPeerId=cdbpPeerId, cdbpAppAdvFromPeerTable=cdbpAppAdvFromPeerTable, cdbpRealmMessageRouteSTRsIn=cdbpRealmMessageRouteSTRsIn, cdbpRealmMessageRouteApp=cdbpRealmMessageRouteApp, cdbpLocalVendorEntry=cdbpLocalVendorEntry, cdbpRealmMessageRouteAccDupReqsts=cdbpRealmMessageRouteAccDupReqsts, cdbpAppAdvToPeerVendorId=cdbpAppAdvToPeerVendorId, cdbpLocalIpAddrType=cdbpLocalIpAddrType, cdbpPeerSecurity=cdbpPeerSecurity, ciscoDiaBaseProtTransientFailureNotif=ciscoDiaBaseProtTransientFailureNotif, cdbpPeerStatsAccPendReqstsOut=cdbpPeerStatsAccPendReqstsOut, ciscoDiameterBasePMIBLocalCfgGroup=ciscoDiameterBasePMIBLocalCfgGroup, cdbpRealmMessageRouteRealm=cdbpRealmMessageRouteRealm, cdbpPeerEntry=cdbpPeerEntry, cdbpRedundancyLastSwitchover=cdbpRedundancyLastSwitchover, cdbpRealmMessageRouteAction=cdbpRealmMessageRouteAction, cdbpPeerIpAddrTable=cdbpPeerIpAddrTable, cdbpPeerStatsSTAsIn=cdbpPeerStatsSTAsIn, cdbpRealmCfgs=cdbpRealmCfgs, cdbpPeerStatsTransientFailures=cdbpPeerStatsTransientFailures, cdbpRealmKnownPeersIndex=cdbpRealmKnownPeersIndex, cdbpLocalVendorTable=cdbpLocalVendorTable, cdbpPeerStorageType=cdbpPeerStorageType, cdbpAppAdvFromPeerVendorId=cdbpAppAdvFromPeerVendorId, cdbpPeerStatsRAAsOut=cdbpPeerStatsRAAsOut, cdbpLocalId=cdbpLocalId, ciscoDiameterBasePMIBNotifs=ciscoDiameterBasePMIBNotifs, ciscoDiameterBasePMIBGroups=ciscoDiameterBasePMIBGroups, cdbpPeerStats=cdbpPeerStats, cdbpRealmMessageRouteASRsOut=cdbpRealmMessageRouteASRsOut, cdbpRealmMessageRouteAccRetrans=cdbpRealmMessageRouteAccRetrans, cdbpAppAdvToPeerServices=cdbpAppAdvToPeerServices, cdbpPeerStatsACRsOut=cdbpPeerStatsACRsOut, cdbpRedundancyEnabled=cdbpRedundancyEnabled, cdbpPeerVendorRowStatus=cdbpPeerVendorRowStatus, cdbpPeerStatsUnknownTypes=cdbpPeerStatsUnknownTypes, 
ciscoDiameterBasePMIBCompliances=ciscoDiameterBasePMIBCompliances, cdbpPeerStatsEToEDupMessages=cdbpPeerStatsEToEDupMessages, cdbpPeerVendorEntry=cdbpPeerVendorEntry, ciscoDiaBaseProtEnableProtocolErrorNotif=ciscoDiaBaseProtEnableProtocolErrorNotif, cdbpPeerStatsTable=cdbpPeerStatsTable, cdbpPeerIpAddrEntry=cdbpPeerIpAddrEntry, ciscoDiameterBasePMIBConform=ciscoDiameterBasePMIBConform, cdbpPeerStatsSTRsOut=cdbpPeerStatsSTRsOut, cdbpRealmMessageRouteIndex=cdbpRealmMessageRouteIndex, cdbpAppAdvToPeerIndex=cdbpAppAdvToPeerIndex, ciscoDiameterBasePMIBPeerStatsGroup=ciscoDiameterBasePMIBPeerStatsGroup, ciscoDiaBaseProtEnablePeerConnectionUpNotif=ciscoDiaBaseProtEnablePeerConnectionUpNotif, cdbpLocalApplRowStatus=cdbpLocalApplRowStatus, ciscoDiaBaseProtEnablePermanentFailureNotif=ciscoDiaBaseProtEnablePermanentFailureNotif, ciscoDiameterBasePMIBPeerStatsSkippedGroup=ciscoDiameterBasePMIBPeerStatsSkippedGroup, PYSNMP_MODULE_ID=ciscoDiameterBasePMIB, ciscoDiameterBasePMIBObjects=ciscoDiameterBasePMIBObjects, cdbpLocalRealm=cdbpLocalRealm, cdbpLocalVendorId=cdbpLocalVendorId, cdbpLocalResetTime=cdbpLocalResetTime, ciscoDiameterBasePMIBRealmCfgSkippedGroup=ciscoDiameterBasePMIBRealmCfgSkippedGroup, cdbpPeerStatsDPRsIn=cdbpPeerStatsDPRsIn, cdbpPeerStatsEntry=cdbpPeerStatsEntry, cdbpPeerStatsAccDupRequests=cdbpPeerStatsAccDupRequests, cdbpRealmMessageRoutePendReqstsOut=cdbpRealmMessageRoutePendReqstsOut, cdbpTrapCfgs=cdbpTrapCfgs, ciscoDiameterBasePMIBTrapCfgGroup=ciscoDiameterBasePMIBTrapCfgGroup, cdbpAppAdvFromPeerType=cdbpAppAdvFromPeerType, cdbpPeerIndex=cdbpPeerIndex, cdbpPeerVendorId=cdbpPeerVendorId, cdbpAppAdvToPeerRowStatus=cdbpAppAdvToPeerRowStatus, cdbpLocalStatsTotalPacketsOut=cdbpLocalStatsTotalPacketsOut, cdbpPeerStatsHByHDropMessages=cdbpPeerStatsHByHDropMessages, cdbpRealmMessageRouteASAsIn=cdbpRealmMessageRouteASAsIn, cdbpLocalStats=cdbpLocalStats, cdbpPeerStatsRedirectEvents=cdbpPeerStatsRedirectEvents, cdbpPeerStatsASRsOut=cdbpPeerStatsASRsOut, 
cdbpPeerStatsTotalRetrans=cdbpPeerStatsTotalRetrans, cdbpRealmMessageRouteEntry=cdbpRealmMessageRouteEntry, cdbpPeerStatsState=cdbpPeerStatsState, cdbpPeerStatsSTRsIn=cdbpPeerStatsSTRsIn, cdbpPeerFirmwareRevision=cdbpPeerFirmwareRevision, cdbpLocalTcpListenPort=cdbpLocalTcpListenPort, cdbpPeerStatsCERsOut=cdbpPeerStatsCERsOut, cdbpLocalApplStorageType=cdbpLocalApplStorageType, cdbpPeerStatsAccRetrans=cdbpPeerStatsAccRetrans, cdbpPeerStatsPermanentFailures=cdbpPeerStatsPermanentFailures, cdbpLocalIpAddrIndex=cdbpLocalIpAddrIndex, cdbpRealmKnownPeersEntry=cdbpRealmKnownPeersEntry, cdbpPeerStatsDWAsIn=cdbpPeerStatsDWAsIn, cdbpLocalStatsTotalUpTime=cdbpLocalStatsTotalUpTime, cdbpPeerStatsDPAsOut=cdbpPeerStatsDPAsOut, ciscoDiaBaseProtPermanentFailureNotif=ciscoDiaBaseProtPermanentFailureNotif, ciscoDiameterBasePMIBLocalStatsSkippedGroup=ciscoDiameterBasePMIBLocalStatsSkippedGroup, cdbpPeerStatsRAAsIn=cdbpPeerStatsRAAsIn, cdbpPeerStatsStateDuration=cdbpPeerStatsStateDuration, cdbpPeerStatsProtocolErrors=cdbpPeerStatsProtocolErrors, ciscoDiameterBasePMIBNotificationsGroup=ciscoDiameterBasePMIBNotificationsGroup, cdbpRealmMessageRouteACRsOut=cdbpRealmMessageRouteACRsOut, cdbpLocalApplEntry=cdbpLocalApplEntry, cdbpPeerStatsDWAsOut=cdbpPeerStatsDWAsOut, cdbpPeerStatsAccReqstsDropped=cdbpPeerStatsAccReqstsDropped, cdbpRealmKnownPeersTable=cdbpRealmKnownPeersTable, cdbpPeerStatsAccsNotRecorded=cdbpPeerStatsAccsNotRecorded, cdbpLocalVendorRowStatus=cdbpLocalVendorRowStatus, cdbpLocalIpAddress=cdbpLocalIpAddress, cdbpLocalIpAddrEntry=cdbpLocalIpAddrEntry, cdbpRealmMessageRouteRARsIn=cdbpRealmMessageRouteRARsIn, cdbpRealmMessageRouteACAsIn=cdbpRealmMessageRouteACAsIn, cdbpLocalOriginHost=cdbpLocalOriginHost, cdbpRealmMessageRouteRAAsIn=cdbpRealmMessageRouteRAAsIn, cdbpRealmMessageRouteRAAsOut=cdbpRealmMessageRouteRAAsOut, ciscoDiameterBasePMIBPeerCfgSkippedGroup=ciscoDiameterBasePMIBPeerCfgSkippedGroup, cdbpPeerPortConnect=cdbpPeerPortConnect, 
cdbpPeerStatsWhoInitDisconnect=cdbpPeerStatsWhoInitDisconnect, cdbpPeerStatsCEAsOut=cdbpPeerStatsCEAsOut, cdbpAppAdvFromPeerIndex=cdbpAppAdvFromPeerIndex, cdbpRealmMessageRouteASRsIn=cdbpRealmMessageRouteASRsIn, cdbpPeerStatsLastDiscCause=cdbpPeerStatsLastDiscCause, cdbpPeerStatsASAsIn=cdbpPeerStatsASAsIn, cdbpPeerIpAddressType=cdbpPeerIpAddressType, cdbpPeerStatsRARsOut=cdbpPeerStatsRARsOut, cdbpPeerStatsDWCurrentStatus=cdbpPeerStatsDWCurrentStatus, cdbpRealmMessageRouteSTRsOut=cdbpRealmMessageRouteSTRsOut, cdbpLocalCfgs=cdbpLocalCfgs, cdbpRealmMessageRouteReqstsDrop=cdbpRealmMessageRouteReqstsDrop, cdbpLocalStatsTotalPacketsIn=cdbpLocalStatsTotalPacketsIn, cdbpPeerCfgs=cdbpPeerCfgs, cdbpRealmKnownPeers=cdbpRealmKnownPeers, cdbpPeerStatsMalformedReqsts=cdbpPeerStatsMalformedReqsts, cdbpRealmMessageRouteRARsOut=cdbpRealmMessageRouteRARsOut, cdbpRealmMessageRouteSTAsOut=cdbpRealmMessageRouteSTAsOut, cdbpLocalIpAddrTable=cdbpLocalIpAddrTable, cdbpPeerStatsACRsIn=cdbpPeerStatsACRsIn, ciscoDiameterBasePMIBRealmStatsSkippedGroup=ciscoDiameterBasePMIBRealmStatsSkippedGroup, cdbpRealmKnownPeersChosen=cdbpRealmKnownPeersChosen, cdbpLocalApplTable=cdbpLocalApplTable, cdbpRealmMessageRouteType=cdbpRealmMessageRouteType, cdbpPeerStatsASRsIn=cdbpPeerStatsASRsIn, cdbpPeerStatsTransportDown=cdbpPeerStatsTransportDown, cdbpRedundancyInfraState=cdbpRedundancyInfraState, ciscoDiameterBasePMIBPeerCfgGroup=ciscoDiameterBasePMIBPeerCfgGroup, cdbpRealmMessageRouteACAsOut=cdbpRealmMessageRouteACAsOut, cdbpAppAdvFromPeerEntry=cdbpAppAdvFromPeerEntry, ciscoDiaBaseProtEnableTransientFailureNotif=ciscoDiaBaseProtEnableTransientFailureNotif, cdbpLocalConfigReset=cdbpLocalConfigReset, cdbpPeerIpAddress=cdbpPeerIpAddress, cdbpAppAdvToPeerTable=cdbpAppAdvToPeerTable, cdbpPeerStatsTimeoutConnAtmpts=cdbpPeerStatsTimeoutConnAtmpts, cdbpPeerStatsDWRsIn=cdbpPeerStatsDWRsIn, cdbpRealmMessageRouteTable=cdbpRealmMessageRouteTable, cdbpPeerStatsRARsIn=cdbpPeerStatsRARsIn, 
cdbpPeerStatsACAsOut=cdbpPeerStatsACAsOut, cdbpRealmMessageRouteSTAsIn=cdbpRealmMessageRouteSTAsIn, cdbpPeerStatsASAsOut=cdbpPeerStatsASAsOut, cdbpPeerStatsDPRsOut=cdbpPeerStatsDPRsOut, cdbpPeerVendorTable=cdbpPeerVendorTable, ciscoDiaBaseProtPeerConnectionUpNotif=ciscoDiaBaseProtPeerConnectionUpNotif, cdbpPeerVendorStorageType=cdbpPeerVendorStorageType, cdbpPeerVendorIndex=cdbpPeerVendorIndex, cdbpPeerStatsCERsIn=cdbpPeerStatsCERsIn, cdbpRealmMessageRouteASAsOut=cdbpRealmMessageRouteASAsOut, ciscoDiameterBasePMIBLocalCfgSkippedGroup=ciscoDiameterBasePMIBLocalCfgSkippedGroup, cdbpPeerPortListen=cdbpPeerPortListen, cdbpAppAdvToPeerEntry=cdbpAppAdvToPeerEntry, ciscoDiaBaseProtProtocolErrorNotif=ciscoDiaBaseProtProtocolErrorNotif, ciscoDiameterBasePMIB=ciscoDiameterBasePMIB, cdbpLocalApplIndex=cdbpLocalApplIndex, cdbpAppAdvToPeerStorageType=cdbpAppAdvToPeerStorageType, cdbpLocalVendorStorageType=cdbpLocalVendorStorageType, cdbpPeerIpAddressIndex=cdbpPeerIpAddressIndex, cdbpPeerRowStatus=cdbpPeerRowStatus, cdbpLocalSctpListenPort=cdbpLocalSctpListenPort, cdbpPeerStatsCEAsIn=cdbpPeerStatsCEAsIn)
174.617021
9,504
0.799176
10,960
98,484
7.181022
0.080018
0.05123
0.089652
0.00925
0.532934
0.395952
0.348038
0.32654
0.284789
0.274675
0
0.043887
0.084024
98,484
563
9,505
174.927176
0.828576
0.003635
0
0.023466
0
0.084838
0.383573
0.108941
0
0
0
0
0
1
0
false
0
0.016245
0
0.016245
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
48ff11c606361c503d4ae242b33d2e5d2c9cf908
1,337
py
Python
py_build/funcs.py
Aesonus/py-build
790a750492b0f6ecd52f6f564d3aa4522e255406
[ "MIT" ]
null
null
null
py_build/funcs.py
Aesonus/py-build
790a750492b0f6ecd52f6f564d3aa4522e255406
[ "MIT" ]
null
null
null
py_build/funcs.py
Aesonus/py-build
790a750492b0f6ecd52f6f564d3aa4522e255406
[ "MIT" ]
null
null
null
from __future__ import annotations

import functools
from typing import Callable, Sequence, TYPE_CHECKING

if TYPE_CHECKING:
    from .build import BuildStepCallable


def split_step_name(name: str, new: str = ' ', old: str = '_') -> str:
    """Return *name* with every *old* substring replaced by *new*, capitalized.

    Turns a function name such as ``build_docs`` into a readable step
    title such as ``Build docs``.
    """
    return name.replace(old, new).capitalize()


def print_step_name(formatter: Callable = split_step_name, args: Sequence = ()):
    """Return a decorator that prints the formatted step name, then runs the step.

    :param formatter: callable applied to the step function's ``__name__``.
    :param args: extra positional arguments forwarded to *formatter*.
    """
    fmt_args = args

    def format_step_name(func: Callable):
        @functools.wraps(func)
        def decorated(*args, **kwargs):
            print(formatter(func.__name__, *fmt_args))
            return func(*args, **kwargs)
        return decorated
    return format_step_name


def print_step_doc():
    """Return a decorator that prints the step function's docstring, then runs it."""
    def decorate_with(func: Callable):
        @functools.wraps(func)
        def output_func_doc(*args, **kwargs):
            print(func.__doc__)
            # BUG FIX: the original wrote ``func(*args, *kwargs)``, which
            # unpacks only the *keys* of the kwargs dict as extra positional
            # arguments. Keyword arguments must be forwarded with ``**``.
            return func(*args, **kwargs)
        return output_func_doc
    return decorate_with


def composed(*decorators: 'BuildStepCallable') -> 'BuildStepCallable':
    """Compose several decorators into a single decorator.

    Useful for defining specific outputs and progress reports for a
    build step and reusing them. Decorators are applied so that the
    first one listed becomes the outermost wrapper.
    """
    def decorated(func: 'BuildStepCallable'):
        for decorator in reversed(decorators):
            func = decorator(func)
        return func
    return decorated
33.425
80
0.682872
162
1,337
5.419753
0.376543
0.045558
0.029613
0.059226
0.134396
0.075171
0
0
0
0
0
0
0.229619
1,337
39
81
34.282051
0.852427
0.139117
0
0.133333
0
0
0.001776
0
0
0
0
0
0
1
0.3
false
0
0.133333
0.033333
0.733333
0.133333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
5b0f67ce020d1273d176ad58ddcab8801ec9c7f2
181
py
Python
Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py
Arbupa/DAS_Sistemas
52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1
[ "MIT" ]
41
2017-09-26T09:36:32.000Z
2022-03-19T18:05:25.000Z
Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py
Arbupa/DAS_Sistemas
52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1
[ "MIT" ]
67
2017-09-11T05:06:12.000Z
2022-02-14T04:44:04.000Z
Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py
Arbupa/DAS_Sistemas
52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1
[ "MIT" ]
210
2017-09-01T00:10:08.000Z
2022-03-19T18:05:12.000Z
# Ask how many people want to dine and report whether a table is ready.
party_size = int(input("Cuantas personas van a cenar?"))
print(party_size)
# Tables seat at most eight; larger parties have to wait.
if party_size <= 8:
    print("La mesa esta lista")
else:
    print("Lo siento, tendran que esperar")
15.083333
48
0.674033
27
181
4.518519
0.814815
0
0
0
0
0
0
0
0
0
0
0.006897
0.198895
181
11
49
16.454545
0.834483
0
0
0
0
0
0.430168
0
0
0
0
0
0
1
0
false
0
0
0
0
0.428571
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
5b10fde1a0b02a1e7f85ed42e2bfe8b97109fa80
514
py
Python
parse_cookie.py
olnikiforov/hillel
911bb94169aa277932e346e564e5efd69073d634
[ "MIT" ]
null
null
null
parse_cookie.py
olnikiforov/hillel
911bb94169aa277932e346e564e5efd69073d634
[ "MIT" ]
1
2021-04-01T18:56:38.000Z
2021-04-01T18:56:38.000Z
parse_cookie.py
olnikiforov/hillel
911bb94169aa277932e346e564e5efd69073d634
[ "MIT" ]
null
null
null
def parse_cookie(query: str) -> dict:
    """Parse a ``Cookie``-style header string into a dict.

    Splits *query* on ``;`` and, for every chunk containing ``=``, maps
    the text before the FIRST ``=`` to everything after it, so values
    may themselves contain ``=``. Chunks without ``=`` and empty input
    are ignored.

    :param query: raw cookie string, e.g. ``'name=Dima;age=28;'``.
    :return: mapping of cookie names to their string values.
    """
    result = {}
    if query:
        for chunk in query.split(';'):
            if '=' in chunk:
                # str.partition splits on the first '=' only, replacing the
                # original's triple split/join of the same string.
                key, _, value = chunk.partition('=')
                result[key] = value
    return result


if __name__ == '__main__':
    assert parse_cookie('name=Dima;') == {'name': 'Dima'}
    assert parse_cookie('') == {}
    assert parse_cookie('name=Dima;age=28;') == {'name': 'Dima', 'age': '28'}
    assert parse_cookie('name=Dima=User;age=28;') == {'name': 'Dima=User', 'age': '28'}
30.235294
87
0.509728
66
514
3.772727
0.363636
0.192771
0.273092
0.253012
0.405622
0
0
0
0
0
0
0.025907
0.249027
514
16
88
32.125
0.619171
0
0
0
0
0
0.196498
0.042802
0
0
0
0
0.307692
1
0.076923
false
0
0
0
0.153846
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
5b14c0f520aa2dfc088e43cb4960682061f61a03
409
py
Python
netrd/__init__.py
sdmccabe/netrd
f703c19b02f42c9f54bcab57014381da11dd58da
[ "MIT" ]
116
2019-01-17T18:31:43.000Z
2022-03-31T13:37:21.000Z
netrd/__init__.py
sdmccabe/netrd
f703c19b02f42c9f54bcab57014381da11dd58da
[ "MIT" ]
175
2019-01-15T01:19:13.000Z
2021-05-25T16:51:26.000Z
netrd/__init__.py
sdmccabe/netrd
f703c19b02f42c9f54bcab57014381da11dd58da
[ "MIT" ]
36
2019-01-14T20:38:32.000Z
2022-01-21T20:58:38.000Z
""" netrd ----- netrd stands for Network Reconstruction and Distances. It is a repository of different algorithms for constructing a network from time series data, as well as for comparing two networks. It is the product of the Network Science Insitute 2019 Collabathon. """ from . import distance # noqa from . import reconstruction # noqa from . import dynamics # noqa from . import utilities # noqa
25.5625
73
0.760391
57
409
5.45614
0.614035
0.128617
0.135048
0
0
0
0
0
0
0
0
0.011976
0.183374
409
15
74
27.266667
0.919162
0.704156
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
d2a75f44feb7064f817bce0160b3db28ad77852c
597
py
Python
barcode/charsets/ean.py
Azd325/python-barcode
b41b1d5d479fb0ad3290a0a6235a8d3203d34ee9
[ "MIT" ]
null
null
null
barcode/charsets/ean.py
Azd325/python-barcode
b41b1d5d479fb0ad3290a0a6235a8d3203d34ee9
[ "MIT" ]
null
null
null
barcode/charsets/ean.py
Azd325/python-barcode
b41b1d5d479fb0ad3290a0a6235a8d3203d34ee9
[ "MIT" ]
null
null
null
# Module patterns for EAN barcodes: each pattern is a string of '1' (bar)
# and '0' (space) modules.

# Three-module guard pattern used at the symbol edges.
EDGE = '101'
# Five-module guard pattern separating the two halves of the symbol.
MIDDLE = '01010'

# Seven-module encodings for digits 0-9 in each of the three EAN
# character sets: CODES[charset][digit] -> 7-character pattern.
# NOTE(review): per the EAN spec, 'A'/'B' are presumably the odd/even
# parity left-hand sets and 'C' the right-hand set — confirm against
# the encoder that consumes this table.
CODES = {
    'A': (
        '0001101', '0011001', '0010011', '0111101', '0100011',
        '0110001', '0101111', '0111011', '0110111', '0001011'
    ),
    'B': (
        '0100111', '0110011', '0011011', '0100001', '0011101',
        '0111001', '0000101', '0010001', '0001001', '0010111'
    ),
    'C': (
        '1110010', '1100110', '1101100', '1000010', '1011100',
        '1001110', '1010000', '1000100', '1001000', '1110100'
    ),
}

# LEFT_PATTERN[d] is the sequence of character sets ('A'/'B') used for
# the six left-half digits, indexed by a leading digit d in 0-9.
LEFT_PATTERN = (
    'AAAAAA', 'AABABB', 'AABBAB', 'AABBBA', 'ABAABB',
    'ABBAAB', 'ABBBAA', 'ABABAB', 'ABABBA', 'ABBABA'
)
28.428571
73
0.515913
50
597
6.14
1
0
0
0
0
0
0
0
0
0
0
0.480176
0.239531
597
20
74
29.85
0.196035
0
0
0.15
0
0
0.470687
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d2c143baf7ea1e8434d64873e45800bbd43dfe04
444
py
Python
sdk/python/approzium/mysql/connector/pooling.py
UpGado/approzium
306b40f16a1ba0dfbe3a312e1c40881e98518137
[ "Apache-2.0" ]
59
2020-07-14T17:18:09.000Z
2022-02-24T07:39:22.000Z
sdk/python/approzium/mysql/connector/pooling.py
UpGado/approzium
306b40f16a1ba0dfbe3a312e1c40881e98518137
[ "Apache-2.0" ]
66
2020-07-09T19:11:55.000Z
2022-03-15T11:42:55.000Z
sdk/python/approzium/mysql/connector/pooling.py
UpGado/approzium
306b40f16a1ba0dfbe3a312e1c40881e98518137
[ "Apache-2.0" ]
9
2020-07-09T19:20:45.000Z
2022-02-24T07:39:26.000Z
from mysql.connector.pooling import MySQLConnectionPool

from ._connect import _parse_kwargs, _patch_MySQLConnection


class MySQLConnectionPool(MySQLConnectionPool):
    """Drop-in pool subclass that routes configuration and connection
    creation through the Approzium connection helpers."""

    def set_config(self, **kwargs):
        # Normalize the keyword arguments with the shared parser before
        # delegating to the stock pool implementation.
        parsed = _parse_kwargs(kwargs)
        super().set_config(**parsed)

    def add_connection(self, cnx=None):
        # Patch MySQLConnection (pooling variant included) for the
        # duration of the call so the new pooled connection is created
        # through the patched class.
        with _patch_MySQLConnection(include_pooling=True):
            super().add_connection(cnx)
31.714286
61
0.75
47
444
6.787234
0.510638
0.068966
0
0
0
0
0
0
0
0
0
0
0.164414
444
13
62
34.153846
0.859838
0
0
0
0
0
0
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d2c30d506f338f0ad2e0b0a0c5af2f47676aea3a
267
py
Python
setup.py
Faust-Wang/vswarm
d18ce643218c18ef1e762f40562104b2a0926ad7
[ "MIT" ]
21
2021-03-03T10:51:46.000Z
2022-03-28T11:00:35.000Z
setup.py
Faust-Wang/vswarm
d18ce643218c18ef1e762f40562104b2a0926ad7
[ "MIT" ]
2
2021-07-21T07:57:16.000Z
2022-03-17T12:41:51.000Z
setup.py
hvourtsis/vswarm
d18ce643218c18ef1e762f40562104b2a0926ad7
[ "MIT" ]
8
2021-02-27T14:29:55.000Z
2022-01-05T19:40:38.000Z
# Do not manually invoke this setup.py, use catkin instead!
from setuptools import setup

from catkin_pkg.python_setup import generate_distutils_setup

# Let catkin derive the distutils arguments (version, author, ...) from
# package.xml; we only declare the package layout here.
setup(**generate_distutils_setup(
    packages=['vswarm'],
    package_dir={'': 'src'},
))
22.25
60
0.764045
36
267
5.416667
0.638889
0.174359
0.225641
0
0
0
0
0
0
0
0
0
0.138577
267
11
61
24.272727
0.847826
0.213483
0
0
1
0
0.043269
0
0
0
0
0
0
1
0
false
0
0.285714
0
0.285714
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d2d95eb0f80255c257603ed734e875c5ce26b88b
2,945
py
Python
authors/apps/profiles/tests/test_follow.py
KabohaJeanMark/ah-backend-invictus
a9cf930934e8cbcb4ee370a088df57abe50ee6d6
[ "BSD-3-Clause" ]
7
2021-03-04T09:29:13.000Z
2021-03-17T17:35:42.000Z
authors/apps/profiles/tests/test_follow.py
KabohaJeanMark/ah-backend-invictus
a9cf930934e8cbcb4ee370a088df57abe50ee6d6
[ "BSD-3-Clause" ]
25
2019-04-23T18:51:02.000Z
2021-06-10T21:22:47.000Z
authors/apps/profiles/tests/test_follow.py
KabohaJeanMark/ah-backend-invictus
a9cf930934e8cbcb4ee370a088df57abe50ee6d6
[ "BSD-3-Clause" ]
7
2019-06-29T10:40:38.000Z
2019-09-23T09:05:45.000Z
from django.urls import reverse from rest_framework import status from .base import BaseTestCase class FollowTestCase(BaseTestCase): """Testcases for following a user.""" def test_follow_user_post(self): """Test start following a user.""" url = reverse('follow', kwargs={'username': 'test2'}) response = self.client.post(url, HTTP_AUTHORIZATION=self.auth_header) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_post_follow_already_followed_user(self): """Test start following a user you already follow.""" url = reverse('follow', kwargs={'username': 'test2'}) self.client.post(url, HTTP_AUTHORIZATION=self.auth_header) response = self.client.post(url, HTTP_AUTHORIZATION=self.auth_header) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_follow_missing_user_post(self): """Test trying to start following a missing user.""" url = reverse('follow', kwargs={'username': 'joel'}) response = self.client.post(url, HTTP_AUTHORIZATION=self.auth_header) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_delete_follow(self): """Test unfollowing a user""" url = reverse('follow', kwargs={'username': 'test2'}) self.client.post(url, HTTP_AUTHORIZATION=self.auth_header) response = self.client.delete(url, HTTP_AUTHORIZATION=self.auth_header) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_delete_follow_of_not_followed_user(self): """Test unfollowing a user you are not following""" url = reverse('follow', kwargs={'username': 'test2'}) response = self.client.delete(url, HTTP_AUTHORIZATION=self.auth_header) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_list_followers_of_user(self): """Test list followers of a user""" url_followers = reverse('getfollowers', kwargs={'username': 'test2'}) self.client.get(url_followers, HTTP_AUTHORIZATION=self.auth_header) url_follow = reverse('follow', kwargs={'username': 'test2'}) self.client.post(url_follow, HTTP_AUTHORIZATION=self.auth_header) 
response = self.client.get(url_followers, HTTP_AUTHORIZATION=self.auth_header) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_list_user_is_following(self): """Test list users the user is following""" url_following = reverse('getfollowing', kwargs={'username': 'test1'}) self.client.get(url_following, HTTP_AUTHORIZATION=self.auth_header) url_follow = reverse('follow', kwargs={'username': 'test2'}) self.client.post(url_follow, HTTP_AUTHORIZATION=self.auth_header) response = self.client.get(url_following, HTTP_AUTHORIZATION=self.auth_header) self.assertEqual(response.status_code, status.HTTP_200_OK)
50.775862
86
0.711036
370
2,945
5.421622
0.17027
0.064806
0.136092
0.162014
0.727318
0.696411
0.655035
0.655035
0.65005
0.615155
0
0.011895
0.172156
2,945
57
87
51.666667
0.810911
0.099491
0
0.5
0
0
0.069705
0
0
0
0
0
0.175
1
0.175
false
0
0.075
0
0.275
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d2dbe93b08cbd7c9fba4a7da5b0696432c491446
2,860
py
Python
rqt_mypkg/src/rqt_mypkg/statistics.py
mounteverset/moveit_path_visualizer
15e55c631cb4c4d052763ebd695ce5fcb6de5a4c
[ "BSD-3-Clause" ]
null
null
null
rqt_mypkg/src/rqt_mypkg/statistics.py
mounteverset/moveit_path_visualizer
15e55c631cb4c4d052763ebd695ce5fcb6de5a4c
[ "BSD-3-Clause" ]
null
null
null
rqt_mypkg/src/rqt_mypkg/statistics.py
mounteverset/moveit_path_visualizer
15e55c631cb4c4d052763ebd695ce5fcb6de5a4c
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3 import sys import copy from moveit_commander import move_group import rospy import moveit_commander import moveit_msgs.msg import geometry_msgs.msg from math import pi, sqrt, pow from std_msgs.msg import String import io import shutil import json #used to convert the points from the gui in a valid message for ros from geometry_msgs.msg import Pose, PoseStamped #used to read out the start points import os from nav_msgs.msg import Path #used for publishing the planned path from start to goal from visualization_msgs.msg import Marker, MarkerArray #used to make a service request from moveit_msgs.srv import GetPositionIKRequest, GetPositionIK from rqt_mypkg import path_planning_interface from trajectory_msgs.msg import JointTrajectoryPoint ## StatsitcisDefinedPath is used to get the path length of given points/positions generated by the Motion Plan class StatisticsDefinedPath(object): ## Returns the path length # @param eef_poses A list of end effector poses derived from the motion between start and goal pose def get_path_length(self, eef_poses): path_length = 0 for i in range(len(eef_poses) - 1): ## @var posex # position x of the given position/point posex = eef_poses[i].position.x ## @var posey # position y of the given position/point posey = eef_poses[i].position.y ## @var posez # position z of the given position/point posez = eef_poses[i].position.z ## @var posex1 # position x of the next given position/point posex1 = eef_poses[i+1].position.x ## @var posey1 # position y of the next given position/point posey1 = eef_poses[i+1].position.y ## @var posez1 # position z of the next given position/point posez1 = eef_poses[i+1].position.z ## @var path_length # formula to get the length of 2 corresponding points path_length += sqrt(pow((posex1 - posex), 2) + pow((posey1- posey), 2))+ pow((posez1-posez),2) return path_length ## Returns the maximum joint acceleration of every acceleration measured # @param motion_plan The motion plan retrieved by the 
planner def get_max_joint_acceleration(self, motion_plan): ## @var maxlist # This list contains all accelerations given by the motion plan maxlist = [] for i in range(len(motion_plan[1].joint_trajectory.points)): for j in range(len(motion_plan[1].joint_trajectory.points[i].accelerations)): for k in range(len(motion_plan[1].joint_trajectory.points[i].accelerations)): maxlist.append(motion_plan[1].joint_trajectory.points[i].accelerations[j]) return max(maxlist)
39.178082
110
0.681469
401
2,860
4.755611
0.316708
0.047195
0.040902
0.033561
0.22129
0.147352
0.104877
0.104877
0.080755
0.058731
0
0.010723
0.25
2,860
73
111
39.178082
0.878322
0.35979
0
0
0
0
0
0
0
0
0
0
0
1
0.052632
false
0
0.5
0
0.631579
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
d2dbfa2d8a9c4169b00a898c87b761496a338473
596
py
Python
apps/sendmail/admin.py
CasualGaming/studlan
63daed67c1d309e4d5bd755eb68163e2174d0e00
[ "MIT" ]
9
2016-03-15T21:03:49.000Z
2020-12-02T19:45:44.000Z
apps/sendmail/admin.py
piyushd26/studlan
6eb96ebda182f44759b430cd497a727e0ee5bb63
[ "MIT" ]
161
2016-02-05T14:11:50.000Z
2020-10-14T10:13:21.000Z
apps/sendmail/admin.py
piyushd26/studlan
6eb96ebda182f44759b430cd497a727e0ee5bb63
[ "MIT" ]
11
2016-07-27T12:20:05.000Z
2021-04-18T05:49:17.000Z
# -*- coding: utf-8 -*- from django.contrib import admin from .models import Mail class MailAdmin(admin.ModelAdmin): list_display = ['subject', 'sent_time', 'recipients_total', 'successful_mails', 'failed_mails', 'done_sending'] ordering = ['-sent_time'] # Prevent creation def has_add_permission(self, request, obj=None): return False # Prevent changes def save_model(self, request, obj, form, change): pass # Prevent M2M changes def save_related(self, request, form, formsets, change): pass admin.site.register(Mail, MailAdmin)
22.074074
115
0.676174
72
596
5.444444
0.680556
0.084184
0.071429
0
0
0
0
0
0
0
0
0.004237
0.208054
596
26
116
22.923077
0.826271
0.124161
0
0.166667
0
0
0.158607
0
0
0
0
0
0
1
0.25
false
0.166667
0.166667
0.083333
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
d2dcba40eaf1e9db722986c2a78f80438fb6fdb3
1,066
py
Python
aoc/year_2020/day_06/solver.py
logan-connolly/AoC
23f47e72abaf438cc97897616be4d6b057a01bf3
[ "MIT" ]
2
2020-12-06T10:59:52.000Z
2021-09-29T22:14:03.000Z
aoc/year_2020/day_06/solver.py
logan-connolly/AoC
23f47e72abaf438cc97897616be4d6b057a01bf3
[ "MIT" ]
null
null
null
aoc/year_2020/day_06/solver.py
logan-connolly/AoC
23f47e72abaf438cc97897616be4d6b057a01bf3
[ "MIT" ]
2
2021-09-29T22:14:18.000Z
2022-01-18T02:20:26.000Z
"""This is the Solution for Year 2020 Day 06""" import re from aoc.abstracts.solver import Answers, StrLines class Solver: def __init__(self, data: str) -> None: self.data = data def _preprocess(self) -> StrLines: delim = "\n\n" return self.data.split(delim) def _solve_part_one(self, lines: StrLines) -> int: cleaned = [re.sub(r"\n", "", answer).strip() for answer in lines] return sum(len(set(answer)) for answer in cleaned) def _solve_part_two(self, lines: StrLines) -> int: cleaned = [answer.rstrip("\n").split("\n") for answer in lines] shared_answer_count = 0 for group in cleaned: shared_answers = set.intersection(*[set(member) for member in group]) shared_answer_count += len(shared_answers) return shared_answer_count def solve(self) -> Answers: lines = self._preprocess() ans_one = self._solve_part_one(lines) ans_two = self._solve_part_two(lines) return Answers(part_one=ans_one, part_two=ans_two)
32.30303
81
0.641651
146
1,066
4.465753
0.356164
0.055215
0.050614
0.06135
0.082822
0
0
0
0
0
0
0.008696
0.244841
1,066
32
82
33.3125
0.801242
0.038462
0
0
0
0
0.009814
0
0
0
0
0
0
1
0.217391
false
0
0.086957
0
0.521739
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d2f5d91da9ad5c16c7e8d867f33c570f4ad80d87
1,127
py
Python
notebooks/denerator_tests/actions/config.py
Collen-Roller/Rasa-Denerator
728d21d93f21a18c9de7be303ceae59392de9a41
[ "MIT" ]
11
2019-09-11T13:48:53.000Z
2021-11-26T00:48:57.000Z
notebooks/denerator_tests/actions/config.py
Collen-Roller/Rasa-Denerator
728d21d93f21a18c9de7be303ceae59392de9a41
[ "MIT" ]
2
2019-10-18T17:21:54.000Z
2021-10-08T06:45:11.000Z
notebooks/denerator_tests/actions/config.py
Collen-Roller/Rasa-Denerator
728d21d93f21a18c9de7be303ceae59392de9a41
[ "MIT" ]
4
2019-10-04T14:43:06.000Z
2021-06-16T21:23:23.000Z
import os policy_model_dir = os.environ.get("POLICY_MODEL_DIR", "models/dialogue/") rasa_nlu_config = os.environ.get("RASA_NLU_CONFIG", "nlu_config.yml") account_sid = os.environ.get("ACCOUNT_SID", "") auth_token = os.environ.get("AUTH_TOKEN", "") twilio_number = os.environ.get("TWILIO_NUMBER", "") platform_api = os.environ.get("RASA_API_ENDPOINT_URL", "") self_port = int(os.environ.get("SELF_PORT", "5001")) core_model_dir = os.environ.get("CORE_MODEL_DIR", "models/dialogue/") remote_core_endpoint = os.environ.get("RASA_REMOTE_CORE_ENDPOINT_URL", "") rasa_core_token = os.environ.get("RASA_CORE_TOKEN", "") mailchimp_api_key = os.environ.get("MAILCHIMP_API_KEY", "") mailchimp_list = os.environ.get("MAILCHIMP_LIST", "") gdrive_credentials = os.environ.get("GDRIVE_CREDENTIALS", "") access_token = os.environ.get("TELEGRAM_TOKEN", "") verify = os.environ.get("TELEGRAM_VERIFY", "rasas_bot") webhook_url = os.environ.get("WEBHOOK_URL", "https://website-demo.rasa.com/webhook") rasa_platform_token = os.environ.get("RASA_PLATFORM_TOKEN", "") rasa_nlg_endpoint = os.environ.get("RASA_NLG_ENDPOINT_URL", "")
30.459459
84
0.747116
165
1,127
4.751515
0.272727
0.206633
0.27551
0.122449
0.165816
0
0
0
0
0
0
0.003861
0.080745
1,127
37
85
30.459459
0.752896
0
0
0
0
0
0.335106
0.062943
0
0
0
0
0
1
0
false
0
0.052632
0
0.052632
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
96024e0d78c0a224ad13e044ee7fc8d5953df2e6
259
py
Python
app/__init__.py
nic-mon/IAIOLab
b8c4a23c95ee722938b393e4824b7fc94447f17c
[ "MIT" ]
null
null
null
app/__init__.py
nic-mon/IAIOLab
b8c4a23c95ee722938b393e4824b7fc94447f17c
[ "MIT" ]
null
null
null
app/__init__.py
nic-mon/IAIOLab
b8c4a23c95ee722938b393e4824b7fc94447f17c
[ "MIT" ]
1
2018-04-11T00:34:09.000Z
2018-04-11T00:34:09.000Z
from flask import Flask """ 1. Creating a flask application instance, the name argument is passed to flask application constructor. It's used to determine the root path""" app = Flask(__name__) app.config.from_object('config') from app import views, models
28.777778
82
0.776062
40
259
4.9
0.65
0.163265
0
0
0
0
0
0
0
0
0
0.004525
0.146718
259
8
83
32.375
0.882353
0
0
0
0
0
0.053571
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
9607844773359aa6aa0c7976c01c1f1c73d9292a
145
py
Python
cryptos.py
pogoetic/tricero
6cb60e780bf9056ad9887a84e2ba7d73787ac2fc
[ "MIT" ]
null
null
null
cryptos.py
pogoetic/tricero
6cb60e780bf9056ad9887a84e2ba7d73787ac2fc
[ "MIT" ]
null
null
null
cryptos.py
pogoetic/tricero
6cb60e780bf9056ad9887a84e2ba7d73787ac2fc
[ "MIT" ]
null
null
null
cryptolist = ['ETH','BTC','XRP','EOS','ADA','NEO','STEEM', 'BTS','ZEC','XMR','XVG','XEM','OMG','MIOTA','XTZ','SC', 'CVC','BAT','XLM','ZRX','VEN']
48.333333
58
0.524138
22
145
3.454545
1
0
0
0
0
0
0
0
0
0
0
0
0.027586
145
3
59
48.333333
0.539007
0
0
0
0
0
0.452055
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
960fe6f4df41a131c506151d154738d3ea6e3c53
533
py
Python
alerter/src/alerter/alert_code/node/evm_alert_code.py
SimplyVC/panic
2f5c327ea0d14b6a49dc8f4599a255048bc2ff6d
[ "Apache-2.0" ]
41
2019-08-23T12:40:42.000Z
2022-03-28T11:06:02.000Z
alerter/src/alerter/alert_code/node/evm_alert_code.py
SimplyVC/panic
2f5c327ea0d14b6a49dc8f4599a255048bc2ff6d
[ "Apache-2.0" ]
147
2019-08-30T22:09:48.000Z
2022-03-30T08:46:26.000Z
alerter/src/alerter/alert_code/node/evm_alert_code.py
SimplyVC/panic
2f5c327ea0d14b6a49dc8f4599a255048bc2ff6d
[ "Apache-2.0" ]
3
2019-09-03T21:12:28.000Z
2021-08-18T14:27:56.000Z
from ..alert_code import AlertCode class EVMNodeAlertCode(AlertCode): NoChangeInBlockHeight = 'evm_node_alert_1' BlockHeightUpdatedAlert = 'evm_node_alert_2' BlockHeightDifferenceIncreasedAboveThresholdAlert = 'evm_node_alert_3' BlockHeightDifferenceDecreasedBelowThresholdAlert = 'evm_node_alert_4' InvalidUrlAlert = 'evm_node_alert_5' ValidUrlAlert = 'evm_node_alert_6' NodeWentDownAtAlert = 'evm_node_alert_7' NodeBackUpAgainAlert = 'evm_node_alert_8' NodeStillDownAlert = 'evm_node_alert_9'
38.071429
74
0.806754
53
533
7.584906
0.509434
0.156716
0.268657
0
0
0
0
0
0
0
0
0.019481
0.133208
533
13
75
41
0.850649
0
0
0
0
0
0.270169
0
0
0
0
0
0
1
0
false
0
0.090909
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
961296a2dbd17acbbeca5341d04b5200b3df15a3
4,973
py
Python
linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/Axon/idGen.py
mdavid/nuxleus
653f1310d8bf08eaa5a7e3326c2349e56a6abdc2
[ "BSD-3-Clause" ]
1
2017-03-28T06:41:51.000Z
2017-03-28T06:41:51.000Z
linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/Axon/idGen.py
mdavid/nuxleus
653f1310d8bf08eaa5a7e3326c2349e56a6abdc2
[ "BSD-3-Clause" ]
null
null
null
linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/Axon/idGen.py
mdavid/nuxleus
653f1310d8bf08eaa5a7e3326c2349e56a6abdc2
[ "BSD-3-Clause" ]
1
2016-12-13T21:08:58.000Z
2016-12-13T21:08:58.000Z
#!/usr/bin/python # # Copyright (C) 2004 British Broadcasting Corporation and Kamaelia Contributors(1) # All Rights Reserved. # # You may only modify and redistribute this under the terms of any of the # following licenses(2): Mozilla Public License, V1.1, GNU General # Public License, V2.0, GNU Lesser General Public License, V2.1 # # (1) Kamaelia Contributors are listed in the AUTHORS file and at # http://kamaelia.sourceforge.net/AUTHORS - please extend this file, # not this notice. # (2) Reproduced in the COPYING file, and at: # http://kamaelia.sourceforge.net/COPYING # Under section 3.5 of the MPL, we are using this text since we deem the MPL # notice inappropriate for this file. As per MPL/GPL/LGPL removal of this # notice is prohibited. # # Please contact us via: kamaelia-list-owner@lists.sourceforge.net # to discuss alternative licensing. # ------------------------------------------------------------------------- """\ ==================== Unique ID generation ==================== The methods of the idGen class are used to generate unique IDs in various forms (numbers, strings, etc) which are used to give microprocesses and other Axon objects a unique identifier and name. * Every Axon.Microprocess.microprocess gets a unique ID * Axon.ThreadedComponent.threadedcomponent uses unique IDs to identify threads Generating a new unique ID -------------------------- Do not use the idGen class defined in this module directly. 
Instead, use any of these module methods to obtain a unique ID: * **Axon.idGen.newId(thing)** - returns a unique identifier as a string based on the class name of the object provided * **Axon.idGen.strId(thing)** - returns a unique identifier as a string based on the class name of the object provided * **Axon.idGen.numId()** - returns a unique identifier as a number * **Axon.idGen.tupleId(thing)** - returns both the numeric and string versions of a new unique id as a tuple (where the string version is based on the class name of the object provided) Calling tupleId(thing) is *not* equivalent to calling numId() then strId(thing) because doing that would return two different id values! Examples:: >>> x=Component.component() >>> idGen.newId(x) 'Component.component_4' >>> idGen.strId(x) 'Component.component_5' >>> idGen.numId() 6 >>> idGen.tupleId(x) (7, 'Component.component_7') """ import debug; debugger = debug.debug() debugger.useConfig() Debug = debugger.debug # idGen - A class to provide Unique Identifiers # # Ids can provide be provided as numerical, string or a tuple. # # numerical ids are integers allocated on a "next integer" basis. # eg object 1, apple 2, orange 3. (Not object 1, apple 2, orange 3) # # string ids consist of the '__str__' of the object, with the numerical # id tacked on the end. # # tuple ids consists : '(the numerical id, the string id)' # class idGen(object): """\ Unique ID creator. Use numId(), strId(), and tupleId() methods to obtain unique IDs. 
""" lowestAllocatedId = 0 def nextId(self): """\ **INTERNAL** Returns the next unique id, incrementing the private class variable """ idGen.lowestAllocatedId = idGen.lowestAllocatedId +1 return idGen.lowestAllocatedId next = nextId # pseudonym def idToString(self,thing,aNumId): """\ **INTERNAL** Combines the 'str()' of the object's class with the id to form a string id """ # This next line takes <class '__main__.foo'> # and chops out the __main__.foo part r = str(thing.__class__)[8:][:-2] + "_" + str(aNumId) return r def numId(self): """Allocates & returns the next available id""" result = self.nextId() assert Debug("idGen.numId", 1, "idGen.numId:", result) return result def strId(self,thing): """\ Allocates & returns the next available id combined with the object's class name, in string form """ theId = self.nextId() strid = self.idToString(thing,theId) assert Debug("idGen.strId", 1, "idGen.strId:", strid) return strid def tupleId(self,thing): """\ Allocates the next available id and returns it both as a tuple (num,str) containing both the numeric version and a string version where it is combined with the object's class name. """ theId = self.nextId() strId = self.idToString(thing,theId) assert Debug("idGen.tupleId", 1, "idGen.tupleId:", theId, strId) return theId, strId newId = idGen().strId strId=idGen().strId numId=idGen().numId tupleId=idGen().tupleId if __name__ == '__main__': class foo: pass class bar: pass class bibble: pass print newId(foo()) print newId(bar()) print newId(bibble())
31.474684
83
0.646893
662
4,973
4.817221
0.330816
0.014111
0.017247
0.022578
0.197241
0.186579
0.144246
0.102854
0.102854
0.090938
0
0.008905
0.232254
4,973
157
84
31.675159
0.826349
0.278504
0
0.052632
0
0
0.067268
0
0
0
0
0
0.078947
0
null
null
0.078947
0.026316
null
null
0.078947
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
961374e180229cec23558c1850e6a56b8464ae8b
63,005
py
Python
pyCEvNS/flux.py
athompson-tamu/pyCEvNS
feb3f83c706e6604608eae83c50ac79ced9140bf
[ "MIT" ]
null
null
null
pyCEvNS/flux.py
athompson-tamu/pyCEvNS
feb3f83c706e6604608eae83c50ac79ced9140bf
[ "MIT" ]
null
null
null
pyCEvNS/flux.py
athompson-tamu/pyCEvNS
feb3f83c706e6604608eae83c50ac79ced9140bf
[ "MIT" ]
null
null
null
""" flux related class and functions """ from scipy.integrate import quad import pandas as pd from .helper import LinearInterp, polar_to_cartesian, lorentz_boost, lorentz_matrix from .oscillation import survival_solar from .parameters import * def _invs(ev): return 1/ev**2 class FluxBaseContinuous: def __init__(self, ev, flux, norm=1): self.norm = norm self.ev = ev self.fx = flux self.ev_min = self.ev[0] self.ev_max = self.ev[-1] self.binw = self.ev[1:] - self.ev[:-1] self.precalc = {None: self.binw*(self.fx[1:]+self.fx[:-1])/2} def __call__(self, ev): if ev == self.ev_min: return self.fx[0] * self.norm if ev == self.ev_max: return self.fx[-1] * self.norm if self.ev_min < ev < self.ev_max: idx = self.ev.searchsorted(ev) l1 = ev - self.ev[idx-1] l2 = self.ev[idx] - ev h1 = self.fx[idx-1] h2 = self.fx[idx] return (l1*h2 + l2*h1) / (l1 + l2) * self.norm return 0 def integrate(self, ea, eb, weight_function=None): if eb <= ea: return 0 res = 0 if weight_function not in self.precalc: weighted = weight_function(self.ev)*self.fx self.precalc[weight_function] = self.binw * (weighted[1:]+weighted[:-1]) / 2 eb = min(eb, self.ev_max) ea = max(ea, self.ev_min) idxmin = self.ev.searchsorted(ea, side='right') idxmax = self.ev.searchsorted(eb, side='left') if idxmin == idxmax: l1 = ea - self.ev[idxmin - 1] l2 = self.ev[idxmin] - ea h1 = self.fx[idxmin - 1] * weight_function(self.ev[idxmin - 1]) \ if weight_function is not None else self.fx[idxmin - 1] h2 = self.fx[idxmin] * weight_function(self.ev[idxmin]) \ if weight_function is not None else self.fx[idxmin] ha = (l1*h2+l2*h1)/(l1+l2) l1 = eb - self.ev[idxmax - 1] l2 = self.ev[idxmax] - eb hb = (l1*h2+l2*h1)/(l1+l2) return (ha + hb) * (eb - ea) / 2 * self.norm res += np.sum(self.precalc[weight_function][idxmin:idxmax-1]) l1 = ea - self.ev[idxmin-1] l2 = self.ev[idxmin] - ea h1 = self.fx[idxmin-1]*weight_function(self.ev[idxmin-1]) \ if weight_function is not None else self.fx[idxmin-1] h2 = 
self.fx[idxmin]*weight_function(self.ev[idxmin]) \ if weight_function is not None else self.fx[idxmin] res += ((l1*h2+l2*h1)/(l1+l2)+h2)*l2/2 l1 = eb - self.ev[idxmax - 1] l2 = self.ev[idxmax] - eb h1 = self.fx[idxmax - 1] * weight_function(self.ev[idxmax - 1]) \ if weight_function is not None else self.fx[idxmax-1] h2 = self.fx[idxmax] * weight_function(self.ev[idxmax]) \ if weight_function is not None else self.fx[idxmax] res += ((l1 * h2 + l2 * h1) / (l1 + l2) + h1) * l1 / 2 return res * self.norm class Flux: """ flux class, flux at source """ def __init__(self, fl_name, delimiter=',', fl_unc=0): """ initializing flux, can take in user provided flux restrictions: user provided data must have 7 columns, first column is neutrino energy in MeV, other columns are neutrino flux in cm^2/s/MeV, they are enu, munu, taunu, enubar, munubar, taunubar :param fl_name: name of the flux or path to the file or array of neutrino flux :param delimiter: delimiter of the input file, default is ',' :param fl_unc: uncertainty of flux """ if isinstance(fl_name, str): self.fl_name = fl_name.lower() else: self.fl_name = 'default' if self.fl_name == 'reactor': self.evMin = 0.0 self.evMax = 30 # MeV self.flUn = 0.02 fpers = 3.0921 * (10 ** 16) # antineutrinos per fission nuperf = 6.14102 self.__nuflux1m = nuperf * fpers / (4 * np.pi) * (meter_by_mev ** 2) elif self.fl_name in ['sns', 'prompt', 'delayed']: self.evMin = 0 self.evMax = 52 # MeV self.flUn = 0.1 self.__norm = 1.13 * (10 ** 11) * (meter_by_mev ** 2) elif self.fl_name in ['solar', 'b8', 'f17', 'n13', 'o15', 'pp', 'hep']: f = np.genfromtxt(pkg_resources.resource_filename(__name__, 'data/' + self.fl_name + '.csv'), delimiter=',') self.flUn = 0 self.evMin = f[0, 0] self.evMax = f[-1, 0] self.__nue = LinearInterp(f[:, 0], f[:, 1] * ((100 * meter_by_mev) ** 2)) else: if isinstance(fl_name, np.ndarray): f = fl_name else: f = np.genfromtxt(fl_name, delimiter=delimiter) self.evMin = np.amin(f[:, 0]) self.evMax = np.amax(f[:, 0]) 
self.flUn = fl_unc self.__nue = LinearInterp(f[:, 0], f[:, 1] * ((100 * meter_by_mev) ** 2)) self.__numu = LinearInterp(f[:, 0], f[:, 2] * ((100 * meter_by_mev) ** 2)) self.__nutau = LinearInterp(f[:, 0], f[:, 3] * ((100 * meter_by_mev) ** 2)) self.__nuebar = LinearInterp(f[:, 0], f[:, 4] * ((100 * meter_by_mev) ** 2)) self.__numubar = LinearInterp(f[:, 0], f[:, 5] * ((100 * meter_by_mev) ** 2)) self.__nutaubar = LinearInterp(f[:, 0], f[:, 6] * ((100 * meter_by_mev) ** 2)) def flux(self, ev, flavor='e', f=None, **kwargs): """ differential neutrino flux at the detector, unit MeV^-3*s^-1 :param ev: nuetrino energy :param flavor: nuetrino flavor :param f: function that convolves with neutrino flux, typically neutrino oscillation, the first argument must be neutrino energy, the last two arguments must be input flavor nui and out put flavor nuf :param kwargs: parameters with keys that goes into function f :return: neutrino flux """ if self.fl_name == 'reactor': # Phys.Rev.D39, 11 Vogel # 5.323608902707208 = Integrate[Exp[.870 - .16*e - .091*e^2], {e, 0, 10}] # reactor neutrino is actually anti-neutrino, this may cause problem when doing electron scattering if flavor == 'ebar': if f is not None: return np.exp(0.87 - 0.16 * ev - 0.091 * (ev ** 2)) / 5.323608902707208 * \ f(ev, nui='ebar', nuf=flavor, **kwargs) return np.exp(0.87 - 0.16 * ev - 0.091 * (ev ** 2)) / 5.323608902707208 * self.__nuflux1m elif flavor[-1] == 'r': if f is not None: return np.exp(0.87 - 0.16 * ev - 0.091 * (ev ** 2)) / 5.323608902707208 * \ f(ev, nui='ebar', nuf=flavor, **kwargs) return 0 else: return 0 elif self.fl_name in ['sns', 'delayed']: if flavor[-1] != 'r': if f is not None: return (3 * ((ev / (2 / 3 * 52)) ** 2) - 2 * ((ev / (2 / 3 * 52)) ** 3)) / 29.25 * self.__norm * \ f(ev, nui='e', nuf=flavor, **kwargs) return (3 * ((ev / (2 / 3 * 52)) ** 2) - 2 * ((ev / (2 / 3 * 52)) ** 3)) / 29.25 * self.__norm \ if flavor == 'e' else 0 else: if f is not None: return (3 * ((ev / 52) ** 2) - 2 * ((ev 
/ 52) ** 3)) / 26 * self.__norm * \ f(ev, nui='mubar', nuf=flavor, **kwargs) return (3 * ((ev / 52) ** 2) - 2 * ((ev / 52) ** 3)) / 26 * self.__norm if flavor == 'mubar' else 0 elif self.fl_name == 'prompt': return 0 elif self.fl_name in ['solar', 'b8', 'f17', 'n13', 'o15', 'pp', 'hep']: if flavor[-1] != 'r': if f is None: f = survival_solar return self.__nue(ev) * f(ev, nui='e', nuf=flavor, **kwargs) return 0 else: if flavor[-1] != 'r': if f is None: if flavor == 'e': return self.__nue(ev) elif flavor == 'mu': return self.__numu(ev) elif flavor == 'tau': return self.__nutau(ev) else: return 0 return self.__nue(ev) * f(ev, nui='e', nuf=flavor, **kwargs) + \ self.__numu(ev) * f(ev, nui='mu', nuf=flavor, **kwargs) + \ self.__nutau(ev) * f(ev, nui='tau', nuf=flavor, **kwargs) else: if f is None: if flavor == 'ebar': return self.__nuebar(ev) elif flavor == 'mubar': return self.__numubar(ev) elif flavor == 'taubar': return self.__nutaubar(ev) else: return 0 return self.__nuebar(ev) * f(ev, nui='ebar', nuf=flavor, **kwargs) + \ self.__numubar(ev) * f(ev, nui='mubar', nuf=flavor, **kwargs) + \ self.__nutaubar(ev) * f(ev, nui='taubar', nuf=flavor, **kwargs) def fint(self, er, m, flavor='e', f=None, **kwargs): """ flux integration over the range that can produce a recoil energy er :param er: recoil energy :param m: mass of the target, it can be an array :param flavor: neutrino flavor :param f: function that convolves with neutrino flux, typically neutrino oscillation, the first argument must be neutrino energy, the last two arguments must be input flavor nui and out put flavor nuf :param kwargs: parameters with keys that goes into function f :return: the result of integration, it can be an array """ emin = 0.5 * (np.sqrt(er ** 2 + 2 * er * m) + er) def fx(ev): return self.flux(ev, flavor, f, **kwargs) if not isinstance(emin, np.ndarray): res = quad(fx, emin, self.evMax)[0] # no need to check range, because outside evMin and evMax are 0 if self.fl_name == 'solar': if f is 
None: f = survival_solar # pep res += 1.44e8 * ((100 * meter_by_mev) ** 2) * f(1.439, nui='e', nuf=flavor, **kwargs) \ if emin < 1.439 else 0 # be7 res += 5e9 * ((100 * meter_by_mev) ** 2) * f(0.8613, nui='e', nuf=flavor, **kwargs) \ if emin < 0.8613 else 0 elif self.fl_name in ['sns', 'prompt']: if f is None and flavor == 'mu': # prompt neutrino res += self.__norm if emin <= 29 else 0 elif f is not None and flavor[-1] != 'r': res += self.__norm * f(29, nui='mu', nuf=flavor, **kwargs) if emin <= 29 else 0 else: res = np.zeros_like(emin) for i in range(emin.shape[0]): res[i] = quad(fx, emin[i], self.evMax)[0] if self.fl_name == 'solar': if f is None: f = survival_solar # pep res[i] += 1.44e8 * ((100 * meter_by_mev) ** 2) * f(1.439, nui='e', nuf=flavor, **kwargs) \ if emin[i] < 1.439 else 0 # be7 res[i] += 5e9 * ((100 * meter_by_mev) ** 2) * f(0.8613, nui='e', nuf=flavor, **kwargs) \ if emin[i] < 0.8613 else 0 elif self.fl_name in ['sns', 'prompt']: if f is None and flavor == 'mu': # prompt neutrino res[i] += self.__norm if emin[i] <= 29 else 0 elif f is not None and flavor[-1] != 'r': res[i] += self.__norm * f(29, nui='mu', nuf=flavor, **kwargs) if emin[i] <= 29 else 0 return res def fintinv(self, er, m, flavor='e', f=None, **kwargs): """ flux/ev integration over the range that can produce a recoil energy er :param er: recoil energy :param m: mass of the target, it can be an array :param flavor: neutrino flavor :param f: function that convolves with neutrino flux, typically neutrino oscillation, the first argument must be neutrino energy, the last two arguments must be input flavor nui and out put flavor nuf :param kwargs: parameters with keys that goes into function f :return: the result of integration, it can be an array """ emin = 0.5 * (np.sqrt(er ** 2 + 2 * er * m) + er) def finv(ev): """ flux/ev """ return self.flux(ev, flavor, f, **kwargs) / ev if not isinstance(emin, np.ndarray): res = quad(finv, emin, self.evMax)[0] if self.fl_name == 'solar': if f is None: 
f = survival_solar # pep res += 1.44e8 * ((100 * meter_by_mev) ** 2) * f(1.439, nui='e', nuf=flavor, **kwargs) / 1.439 \ if emin < 1.439 else 0 # be7 res += 5e9 * ((100 * meter_by_mev) ** 2) * f(0.8613, nui='e', nuf=flavor, **kwargs) / 0.8613 \ if emin < 0.8613 else 0 elif self.fl_name in ['sns', 'prompt']: if f is None and flavor == 'mu': # prompt neutrino res += self.__norm / 29 if emin <= 29 else 0 elif f is not None and flavor[-1] != 'r': res += self.__norm / 29 * f(29, nui='mu', nuf=flavor, **kwargs) if emin <= 29 else 0 else: res = np.zeros_like(emin) for i in range(emin.shape[0]): res[i] = quad(finv, emin[i], self.evMax)[0] if self.fl_name == 'solar': if f is None: f = survival_solar # pep res[i] += 1.44e8 * ((100 * meter_by_mev) ** 2) * f(1.439, nui='e', nuf=flavor, **kwargs) / \ 1.439 if emin[i] < 1.439 else 0 # be7 res[i] += 5e9 * ((100 * meter_by_mev) ** 2) * f(0.8613, nui='e', nuf=flavor, **kwargs) / \ 0.8613 if emin[i] < 0.8613 else 0 elif self.fl_name in ['sns', 'prompt']: if f is None and flavor == 'mu': # prompt neutrino res[i] += self.__norm / 29 if emin[i] <= 29 else 0 elif f is not None and flavor[-1] != 'r': res[i] += self.__norm / 29 * f(29, nui='mu', nuf=flavor, **kwargs) \ if emin[i] <= 29 else 0 return res def fintinvs(self, er, m, flavor='e', f=None, **kwargs): """ flux/ev^2 integration over the range that can produce a recoil energy er :param er: recoil energy :param m: mass of the target, it can be an array :param flavor: neutrino flavor :param f: function that convolves with neutrino flux, typically neutrino oscillation, the first argument must be neutrino energy, the last two arguments must be input flavor nui and out put flavor nuf :param kwargs: parameters with keys that goes into function f :return: the result of integration, it can be an array """ emin = 0.5 * (np.sqrt(er ** 2 + 2 * er * m) + er) def finvs(ev): """ flux/ev^2 """ return self.flux(ev, flavor, f, **kwargs) / (ev ** 2) if not isinstance(emin, np.ndarray): res = 
class NeutrinoFluxFactory:
    """Factory that constructs NeutrinoFlux objects for the supported flux models."""

    def __init__(self):
        # 'jsns' added: get() has always had a branch for it, but it was missing
        # from this list, so requesting it raised 'flux not found'.
        self.flux_list = ['solar', 'solar_b8', 'solar_f17', 'solar_hep', 'solar_n13', 'solar_o15',
                          'solar_pp', 'solar_pep', 'solar_be7', 'coherent', 'coherent_prompt',
                          'coherent_delayed', 'far_beam_nu', 'far_beam_nubar', 'atmospheric',
                          'jsns', 'jsns_prompt', 'jsns_delayed', 'jsns_prompt_continuous',
                          'near_beam_nu', 'near_beam_nubar', ]

    def print_available(self):
        """Print the names accepted by get()."""
        print(self.flux_list)

    def interp_flux(self, nrg, data):
        """Linearly interpolate a two-column (energy, flux) table at energy nrg."""
        return np.interp(nrg, data[:, 0], data[:, 1])

    def get(self, flux_name, **kwargs):
        """
        Build and return the NeutrinoFlux object named flux_name.
        :param flux_name: one of self.flux_list
        :param kwargs: model-specific options ('npoints' for coherent_delayed,
                       'zenith' for atmospheric)
        :raises Exception: if flux_name is unknown or a required kwarg is missing
        """
        if flux_name not in self.flux_list:
            print('flux name not in current list: ', self.flux_list)
            raise Exception('flux not found.')
        if flux_name in ['solar_b8', 'solar_f17', 'solar_hep', 'solar_n13', 'solar_o15', 'solar_pp']:
            f = np.genfromtxt(pkg_resources.resource_filename(__name__, 'data/' + flux_name[6:] + '.csv'),
                              delimiter=',')
            return NeutrinoFlux(continuous_fluxes={'ev': f[:, 0], 'e': f[:, 1]})
        if flux_name == 'solar':
            f = np.genfromtxt(pkg_resources.resource_filename(__name__, 'data/' + flux_name + '.csv'),
                              delimiter=',')
            # continuous spectrum plus the pep (1.439 MeV) and be7 (0.8613 MeV) lines
            return NeutrinoFlux(continuous_fluxes={'ev': f[:, 0], 'e': f[:, 1]},
                                delta_fluxes={'e': [(1.439, 1.44e8), (0.8613, 5e9)]})
        if flux_name in ('pep', 'solar_pep'):
            # bug fix: this branch only matched 'pep', but the advertised name in
            # flux_list is 'solar_pep', which silently fell through and returned None.
            return NeutrinoFlux(delta_fluxes={'e': [(1.439, 1.44e8), ]})
        if flux_name in ('be7', 'solar_be7'):
            # bug fix: same mismatch as above for 'solar_be7'.
            return NeutrinoFlux(delta_fluxes={'e': [(0.8613, 5e9), ]})
        if flux_name == 'coherent':
            # Michel-spectrum shapes for the delayed nu_e and nu_mu-bar components
            def de(evv):
                return (3 * ((evv / (2 / 3 * 52)) ** 2) - 2 * ((evv / (2 / 3 * 52)) ** 3)) / 29.25

            def dmubar(evv):
                return (3 * ((evv / 52) ** 2) - 2 * ((evv / 52) ** 3)) / 26
            ev = np.linspace(0.001, 52, 100)
            return NeutrinoFlux(continuous_fluxes={'ev': ev, 'e': de(ev), 'mubar': dmubar(ev)},
                                delta_fluxes={'mu': [(29, 1)]},
                                norm=1.13 * (10 ** 7))  # default unit is /(cm^2*s)
        if flux_name == 'coherent_delayed':
            def de(evv):
                return (3 * ((evv / (2 / 3 * 52)) ** 2) - 2 * ((evv / (2 / 3 * 52)) ** 3)) / 29.25

            def dmubar(evv):
                return (3 * ((evv / 52) ** 2) - 2 * ((evv / 52) ** 3)) / 26
            ev = np.linspace(0.001, 52, kwargs['npoints'] if 'npoints' in kwargs else 100)
            return NeutrinoFlux(continuous_fluxes={'ev': ev, 'e': de(ev), 'mubar': dmubar(ev)},
                                norm=1.13 * (10 ** 7))
        if flux_name == 'coherent_prompt':
            return NeutrinoFlux(delta_fluxes={'mu': [(29, 1)]}, norm=1.13 * (10 ** 7))
        if flux_name == 'jsns':
            nu_e = np.genfromtxt(pkg_resources.resource_filename(__name__, "data/jsns2/jsns_nu_e.txt"),
                                 delimiter=',')
            nu_mu = np.genfromtxt(pkg_resources.resource_filename(__name__, "data/jsns2/jsns_nu_mu_nodelta.txt"),
                                  delimiter=',')
            nubar_mu = np.genfromtxt(pkg_resources.resource_filename(__name__, "data/jsns2/jsns_nubar_mu.txt"),
                                     delimiter=',')
            # normalize each tabulated spectrum to unit integral over 0-300 MeV
            norm_nu_e = quad(self.interp_flux, 0, 300, args=(nu_e,))[0]
            norm_nu_mu = quad(self.interp_flux, 0, 300, args=(nu_mu,))[0]
            norm_nubar_mu = quad(self.interp_flux, 0, 300, args=(nubar_mu,))[0]

            def numuPDF(energy):
                return self.interp_flux(energy, nu_mu) / norm_nu_mu

            def nuePDF(energy):
                return self.interp_flux(energy, nu_e) / norm_nu_e

            def nubarmuPDF(energy):
                return self.interp_flux(energy, nubar_mu) / norm_nubar_mu
            edges = np.arange(0, 302, 2)  # energy bin edges
            ev = (edges[:-1] + edges[1:]) / 2
            return NeutrinoFlux(continuous_fluxes={'ev': ev, 'e': nuePDF(ev), 'mubar': nubarmuPDF(ev),
                                                   'mu': numuPDF(ev)},
                                delta_fluxes={'mu': [(29, 1), (236, 0.013)]},
                                norm=4.9 * (10 ** 7))  # default unit is /(cm^2*s)
        if flux_name == 'jsns_delayed':
            nu_e = np.genfromtxt(pkg_resources.resource_filename(__name__, "data/jsns2/jsns_nu_e.txt"),
                                 delimiter=',')
            nubar_mu = np.genfromtxt(pkg_resources.resource_filename(__name__, "data/jsns2/jsns_nubar_mu.txt"),
                                     delimiter=',')
            norm_nu_e = quad(self.interp_flux, 0, 300, args=(nu_e,))[0]
            norm_nubar_mu = quad(self.interp_flux, 0, 300, args=(nubar_mu,))[0]

            def nuePDF(energy):
                return self.interp_flux(energy, nu_e) / norm_nu_e

            def nubarmuPDF(energy):
                return self.interp_flux(energy, nubar_mu) / norm_nubar_mu
            edges = np.arange(0, 302, 2)  # energy bin edges
            ev = (edges[:-1] + edges[1:]) / 2
            return NeutrinoFlux(continuous_fluxes={'ev': ev, 'e': nuePDF(ev), 'mubar': nubarmuPDF(ev)},
                                norm=3 * (10 ** 7))
        if flux_name == 'jsns_prompt':
            return NeutrinoFlux(delta_fluxes={'mu': [(29, 1), (236, 0.013)]}, norm=1.85 * (10 ** 7))
        if flux_name == 'jsns_prompt_continuous':
            nu_mu = np.genfromtxt(pkg_resources.resource_filename(__name__, "data/jsns2/jsns_nu_mu_nodelta.txt"),
                                  delimiter=',')
            norm_nu_mu = quad(self.interp_flux, 0, 300, args=(nu_mu,))[0]

            def numuPDF(energy):
                return self.interp_flux(energy, nu_mu) / norm_nu_mu
            edges = np.arange(0, 302, 2)  # energy bin edges
            ev = (edges[:-1] + edges[1:]) / 2
            return NeutrinoFlux(continuous_fluxes={'ev': ev, 'mu': numuPDF(ev)}, norm=1.85 * (10 ** 4))
        if flux_name == 'far_beam_nu':
            far_beam_txt = 'data/dune_beam_fd_nu_flux_120GeVoptimized.txt'
            f_beam = np.genfromtxt(pkg_resources.resource_filename(__name__, far_beam_txt), delimiter=',')
            nu = {'ev': f_beam[:, 0],
                  'e': f_beam[:, 1], 'mu': f_beam[:, 2],
                  'ebar': f_beam[:, 4], 'mubar': f_beam[:, 5]}
            return NeutrinoFlux(continuous_fluxes=nu)
        if flux_name == 'far_beam_nubar':
            far_beam_txt = 'data/dune_beam_fd_antinu_flux_120GeVoptimized.txt'
            f_beam = np.genfromtxt(pkg_resources.resource_filename(__name__, far_beam_txt), delimiter=',')
            nu = {'ev': f_beam[:, 0],
                  'e': f_beam[:, 1], 'mu': f_beam[:, 2],
                  'ebar': f_beam[:, 4], 'mubar': f_beam[:, 5]}
            return NeutrinoFlux(continuous_fluxes=nu)
        if flux_name == 'near_beam_nu':
            far_beam_txt = 'data/dune_beam_nd_nu_flux_120GeVoptimized.txt'
            f_beam = np.genfromtxt(pkg_resources.resource_filename(__name__, far_beam_txt))
            nu = {'ev': f_beam[:, 0],
                  'e': f_beam[:, 1], 'mu': f_beam[:, 2],
                  'ebar': f_beam[:, 4], 'mubar': f_beam[:, 5]}
            return NeutrinoFlux(continuous_fluxes=nu)
        if flux_name == 'near_beam_nubar':
            far_beam_txt = 'data/dune_beam_nd_antinu_flux_120GeVoptimized.txt'
            f_beam = np.genfromtxt(pkg_resources.resource_filename(__name__, far_beam_txt))
            nu = {'ev': f_beam[:, 0],
                  'e': f_beam[:, 1], 'mu': f_beam[:, 2],
                  'ebar': f_beam[:, 4], 'mubar': f_beam[:, 5]}
            return NeutrinoFlux(continuous_fluxes=nu)
        if flux_name == 'atmospheric':
            if 'zenith' not in kwargs:
                raise Exception('please specify zenith angle')
            zen = np.round(kwargs['zenith'], decimals=3)
            zen_list = np.round(np.linspace(-0.975, 0.975, 40), decimals=3)
            if zen not in zen_list:
                print('available choice of zenith angle: ', zen_list)
                raise Exception('zenith angle not available')
            # each zenith bin occupies 61 consecutive rows in the table
            idx = (0.975 - zen) / 0.05 * 61
            f_atmos = np.genfromtxt(pkg_resources.resource_filename(__name__, 'data/atmos.txt'),
                                    delimiter=',')
            nu = {'ev': f_atmos[int(round(idx)):int(round(idx))+61, 0],
                  'e': f_atmos[int(round(idx)):int(round(idx))+61, 2],
                  'mu': f_atmos[int(round(idx)):int(round(idx))+61, 3],
                  'ebar': f_atmos[int(round(idx)):int(round(idx))+61, 5],
                  'mubar': f_atmos[int(round(idx)):int(round(idx))+61, 6]}
            return NeutrinoFlux(continuous_fluxes=nu)
class NeutrinoFlux:
    """
    Neutrino flux container: a continuous (tabulated) spectrum per flavor plus optional
    monoenergetic delta lines, with linear interpolation and trapezoidal integration.
    """

    def __init__(self, continuous_fluxes=None, delta_fluxes=None, norm=1):
        """
        :param continuous_fluxes: dict with key 'ev' (energies) and per-flavor arrays
            ('e', 'mu', 'tau', 'ebar', 'mubar', 'taubar'), or None
        :param delta_fluxes: dict mapping flavor -> list of (energy, strength) tuples, or None
        :param norm: overall normalization, converted here to MeV-based units
        """
        self.norm = norm * ((100 * meter_by_mev) ** 2)
        self.ev_min = None
        self.ev_max = None
        if continuous_fluxes is None:
            self.nu = None
        elif isinstance(continuous_fluxes, dict):
            # sort the energy grid ascending and reorder every flavor array to match
            self.ev = continuous_fluxes['ev']
            sorted_idx = np.argsort(self.ev)
            self.ev = self.ev[sorted_idx]
            self.ev_min = self.ev[0]
            self.ev_max = self.ev[-1]
            if self.ev_min == 0:
                raise Exception('flux with neutrino energy equal to zeros is not supported. '
                                'please consider using a small value for your lower bound.')
            self.nu = {'e': continuous_fluxes['e'][sorted_idx] if 'e' in continuous_fluxes else None,
                       'mu': continuous_fluxes['mu'][sorted_idx] if 'mu' in continuous_fluxes else None,
                       'tau': continuous_fluxes['tau'][sorted_idx] if 'tau' in continuous_fluxes else None,
                       'ebar': continuous_fluxes['ebar'][sorted_idx] if 'ebar' in continuous_fluxes else None,
                       'mubar': continuous_fluxes['mubar'][sorted_idx] if 'mubar' in continuous_fluxes else None,
                       'taubar': continuous_fluxes['taubar'][sorted_idx] if 'taubar' in continuous_fluxes else None}
            # per-bin widths and precomputed trapezoid areas, cached per weight function
            # (key None = unweighted)
            self.binw = self.ev[1:] - self.ev[:-1]
            self.precalc = {None: {flr: self.binw*(flx[1:]+flx[:-1])/2 if flx is not None else None
                                   for flr, flx in self.nu.items()}}
        else:
            raise Exception('only support dict as input.')
        if delta_fluxes is None:
            self.delta_nu = None
        elif isinstance(delta_fluxes, dict):
            self.delta_nu = {'e': delta_fluxes['e'] if 'e' in delta_fluxes else None,
                             'mu': delta_fluxes['mu'] if 'mu' in delta_fluxes else None,
                             'tau': delta_fluxes['tau'] if 'tau' in delta_fluxes else None,
                             'ebar': delta_fluxes['ebar'] if 'ebar' in delta_fluxes else None,
                             'mubar': delta_fluxes['mubar'] if 'mubar' in delta_fluxes else None,
                             'taubar': delta_fluxes['taubar'] if 'taubar' in delta_fluxes else None}
            for flavor in self.delta_nu:  # grab the maximum energy of the delta fluxes
                if self.delta_nu[flavor] is None:
                    continue
                energies = [self.delta_nu[flavor][i][0] for i in range(len(self.delta_nu[flavor]))]
                if self.ev_max is None or max(energies) > self.ev_max:
                    self.ev_max = max(energies)
        else:
            raise Exception("'delta_fluxes' must be a dictionary of a list of tuples! e.g. {'e': [(12, 4), (14, 15)], ...}")

    def __call__(self, ev, flavor):
        """Continuous flux at energy ev for the given flavor (linear interpolation); 0 outside range."""
        if self.nu is None or self.nu[flavor] is None:
            return 0
        if ev == self.ev_min:
            return self.nu[flavor][0] * self.norm
        if ev == self.ev_max:
            return self.nu[flavor][-1] * self.norm
        if self.ev_min < ev < self.ev_max:
            idx = self.ev.searchsorted(ev)
            # linear interpolation between the two bracketing grid points
            l1 = ev - self.ev[idx - 1]
            l2 = self.ev[idx] - ev
            h1 = self.nu[flavor][idx - 1]
            h2 = self.nu[flavor][idx]
            return (l1*h2+l2*h1)/(l1+l2) * self.norm
        return 0

    def integrate(self, ea, eb, flavor, weight_function=None):
        """
        Integrate the flux (optionally weighted) from ea to eb for one flavor.
        Please avoid using lambda as your weight_function!!!
        (the function object itself is the cache key in self.precalc, so a fresh
        lambda on every call would defeat the caching)
        :param ea: lower bound
        :param eb: upper bound
        :param flavor: neutrino flavor key
        :param weight_function: optional weight w(ev) multiplied into the integrand
        :return: the (normalized) integral; delta lines inside (ea, eb] are added
        """
        if eb <= ea:
            return 0
        res = 0
        if self.delta_nu is not None and self.delta_nu[flavor] is not None:
            for deltas in self.delta_nu[flavor]:
                if ea < deltas[0] <= eb:  # self.ev_max should be included with <=
                    res += deltas[1] if weight_function is None else deltas[1]*weight_function(deltas[0])
        if self.nu is not None and self.nu[flavor] is not None:
            # lazily cache the weighted per-bin trapezoid areas for this weight function
            if weight_function not in self.precalc:
                weight = weight_function(self.ev)
                self.precalc[weight_function] = {flr: self.binw*((flx*weight)[1:]+(flx*weight)[:-1])/2
                                                 if flx is not None else None
                                                 for flr, flx in self.nu.items()}
            eb = min(eb, self.ev_max)
            ea = max(ea, self.ev_min)
            idxmin = self.ev.searchsorted(ea, side='right')
            idxmax = self.ev.searchsorted(eb, side='left')
            if idxmin == idxmax:
                # ea and eb fall inside the same grid bin: single trapezoid between
                # the two interpolated endpoints
                l1 = ea - self.ev[idxmin - 1]
                l2 = self.ev[idxmin] - ea
                h1 = self.nu[flavor][idxmin - 1] * weight_function(self.ev[idxmin - 1]) \
                    if weight_function is not None else self.nu[flavor][idxmin - 1]
                h2 = self.nu[flavor][idxmin] * weight_function(self.ev[idxmin]) \
                    if weight_function is not None else self.nu[flavor][idxmin]
                ha = (l1*h2+l2*h1)/(l1+l2)
                l1 = eb - self.ev[idxmax - 1]
                l2 = self.ev[idxmax] - eb
                hb = (l1*h2+l2*h1)/(l1+l2)
                return (ha + hb) * (eb - ea) / 2 * self.norm
            # whole interior bins from the precalculated areas
            res += np.sum(self.precalc[weight_function][flavor][idxmin:idxmax-1])
            # partial bin at the lower end [ea, ev[idxmin]]
            l1 = ea - self.ev[idxmin-1]
            l2 = self.ev[idxmin] - ea
            h1 = self.nu[flavor][idxmin-1]*weight_function(self.ev[idxmin-1]) \
                if weight_function is not None else self.nu[flavor][idxmin-1]
            h2 = self.nu[flavor][idxmin]*weight_function(self.ev[idxmin]) \
                if weight_function is not None else self.nu[flavor][idxmin]
            res += ((l1*h2+l2*h1)/(l1+l2)+h2)*l2/2
            # partial bin at the upper end [ev[idxmax-1], eb]
            l1 = eb - self.ev[idxmax - 1]
            l2 = self.ev[idxmax] - eb
            h1 = self.nu[flavor][idxmax - 1] * weight_function(self.ev[idxmax - 1]) \
                if weight_function is not None else self.nu[flavor][idxmax-1]
            h2 = self.nu[flavor][idxmax] * weight_function(self.ev[idxmax]) \
                if weight_function is not None else self.nu[flavor][idxmax]
            res += ((l1 * h2 + l2 * h1) / (l1 + l2) + h1) * l1 / 2
        return res * self.norm

    def change_parameters(self):
        # placeholder for API symmetry with the DM flux classes
        pass
class DMFlux:
    """
    Dark matter flux at COHERENT
    """
    def __init__(self, dark_photon_mass, life_time, coupling_quark, dark_matter_mass,
                 detector_distance=19.3, pot_mu=0.75, pot_sigma=0.25, size=100000,
                 mono_energy=None):
        """
        initialize and generate flux
        :param dark_photon_mass: dark photon mass
        :param life_time: life time of dark photon in rest frame, unit in micro second
        :param coupling_quark: dark photon coupling to quarks
        :param dark_matter_mass: mass of dark matter, unit in MeV
        :param detector_distance: distance from the detector to the Hg target
        :param pot_mu: mean of guassian distribution of proton on target, unit in micro second
        :param pot_sigma: std of guassian distribution of proton on target, unit in micro second
        :param size: size of sampling dark photons
        :param mono_energy: if given, use a narrow monoenergetic flux instead of MC generation
        """
        self.dp_m = dark_photon_mass
        self.dm_m = dark_matter_mass
        self.epsi_quark = coupling_quark
        # distances/times converted to natural (MeV-based) units
        self.det_dist = detector_distance / meter_by_mev
        self.dp_life = life_time * 1e-6 * c_light / meter_by_mev
        self.pot_mu = pot_mu * 1e-6 * c_light / meter_by_mev
        self.pot_sigma = pot_sigma * 1e-6 * c_light / meter_by_mev
        if mono_energy is None:
            self.timing, self.energy = self._generate(size)
        else:
            self.timing, self.energy = self._mono_flux(mono_energy, pot_mu)
        self.ed_min = self.energy.min()
        self.ed_max = self.energy.max()
        # overall normalization; factor 2 because each dark photon decays into two DM particles
        self.dm_norm = self.epsi_quark**2*0.23*1e20 / (4*np.pi*(detector_distance**2)*24*3600) * (meter_by_mev**2) * \
            self.timing.shape[0] * 2 / size

    def _generate(self, size=1000000):
        """
        generate dark matter flux at COHERENT
        :param size: size of sampling dark photons
        :return: time and energy histogram of dark matter
        """
        # two-body kinematics of pi- + p -> n + A' at rest
        dp_m = self.dp_m
        dp_e = ((massofpi+massofp)**2 - massofn**2 + dp_m**2)/(2*(massofpi+massofp))
        dp_p = np.sqrt(dp_e ** 2 - dp_m ** 2)
        dp_v = dp_p / dp_e
        gamma = dp_e / dp_m
        tau = self.dp_life * gamma
        tf = np.random.normal(self.pot_mu, self.pot_sigma, size)  # POT
        t = np.random.exponential(tau, size)  # life time of each dark photon
        cs = np.random.uniform(-1, 1, size)  # direction of each dark photon
        # in rest frame
        estar = dp_m / 2
        pstar = np.sqrt(estar ** 2 - self.dm_m ** 2)
        pstarx = pstar * cs
        pstary = pstar * np.sqrt(1 - cs ** 2)
        # boost to lab frame
        elab = gamma * (estar + dp_v * pstarx)
        plabx = gamma * (pstarx + dp_v * estar)
        plaby = pstary
        vx = plabx / elab
        vy = plaby / elab
        timing = []
        energy = []
        for i in range(size):
            # solve |decay point + v*t_dm| = det_dist for the arrival time t_dm
            a = vx[i] ** 2 + vy[i] ** 2
            b = 2 * vx[i] * t[i] * dp_v
            cc = dp_v ** 2 * t[i] ** 2 - self.det_dist ** 2
            if b ** 2 - 4 * a * cc >= 0:
                if (-b - np.sqrt(b ** 2 - 4 * a * cc)) / (2 * a) > 0:
                    timing.append((-b - np.sqrt(b ** 2 - 4 * a * cc)) / (2 * a) + t[i] + tf[i])
                    energy.append(elab[i])
                if (-b + np.sqrt(b ** 2 - 4 * a * cc)) / (2 * a) > 0:
                    timing.append((-b + np.sqrt(b ** 2 - 4 * a * cc)) / (2 * a) + t[i] + tf[i])
                    energy.append(elab[i])
        return np.array(timing) / c_light * meter_by_mev * 1e6, np.array(energy)

    def _mono_flux(self, e_chi, t_trig, size=1000):
        """Narrow gaussian approximation of a monoenergetic DM flux around (t_trig, e_chi)."""
        return np.random.normal(loc=t_trig, scale=0.01*t_trig, size=size), \
            np.random.normal(loc=e_chi, scale=0.005*e_chi, size=size)

    def flux(self, ev):
        """
        dark matter flux
        :param ev: dark matter energy
        :return: dark matter flux
        """
        return 1/(self.ed_max-self.ed_min)*self.dm_norm if self.ed_min <= ev <= self.ed_max else 0

    def fint(self, er, m, **kwargs):
        """
        flux/(ex^2-mx^2) integration
        :param er: recoil energy in MeV
        :param m: target nucleus mass in MeV
        :param kwargs: other argument
        :return: flux/(ex^2-mx^2) integration
        """
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        # NOTE(review): emin is zeroed here, so the integration always starts at 0
        # regardless of er and m — confirm this is intentional
        emin = 0.0 * emin

        def integrand(ex):
            return self.flux(ex)/(ex**2 - self.dm_m**2)
        if not isinstance(emin, np.ndarray):
            res = quad(integrand, emin, self.ed_max)[0]
        else:
            res = np.zeros_like(emin)
            for i in range(emin.shape[0]):
                res[i] = quad(integrand, emin[i], self.ed_max)[0]
        return res

    def fint1(self, er, m, **kwargs):
        """
        flux*ex/(ex^2-mx^2) integration
        :param er: recoil energy in MeV
        :param m: target nucleus mass in MeV
        :param kwargs: other argument
        :return: flux*ex/(ex^2-mx^2) integration
        """
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        # NOTE(review): see fint — lower bound forced to 0
        emin = 0.0 * emin

        def integrand(ex):
            return self.flux(ex) * ex / (ex ** 2 - self.dm_m ** 2)
        if not isinstance(emin, np.ndarray):
            res = quad(integrand, emin, self.ed_max)[0]
        else:
            res = np.zeros_like(emin)
            for i in range(emin.shape[0]):
                res[i] = quad(integrand, emin[i], self.ed_max)[0]
        return res

    def fint2(self, er, m, **kwargs):
        """
        flux*ex^2/(ex^2-mx^2) integration
        :param er: recoil energy in MeV
        :param m: target nucleus mass in MeV
        :param kwargs: other argument
        :return: flux*ex^2/(ex^2-mx^2) integration
        """
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        # NOTE(review): see fint — lower bound forced to 0
        emin = 0.0 * emin

        def integrand(ex):
            return self.flux(ex) * ex**2 / (ex ** 2 - self.dm_m ** 2)
        if not isinstance(emin, np.ndarray):
            res = quad(integrand, emin, self.ed_max)[0]
        else:
            res = np.zeros_like(emin)
            for i in range(emin.shape[0]):
                res[i] = quad(integrand, emin[i], self.ed_max)[0]
        return res
class DMFluxIsoPhoton(FluxBaseContinuous):
    """
    Dark matter flux from dark photons produced isotropically out of a tabulated
    photon distribution (list of (energy, count) pairs), binned into a continuous
    energy spectrum via the FluxBaseContinuous base class.
    """
    def __init__(self, photon_distribution, dark_photon_mass, coupling, dark_matter_mass,
                 life_time=0.001, detector_distance=19.3, pot_rate=5e20, pot_sample=100000,
                 brem_suppress=True, pot_mu=0.7, pot_sigma=0.15, sampling_size=100, nbins=20,
                 verbose=False):
        self.nbins = nbins
        self.photon_flux = photon_distribution
        self.dp_m = dark_photon_mass
        self.dm_m = dark_matter_mass
        self.epsilon = coupling
        self.life_time = life_time  # input in mus, internal in s
        self.det_dist = detector_distance  # meters
        self.pot_rate = pot_rate  # the number of POT/day in the experiment
        self.pot_mu = pot_mu
        self.pot_sigma = pot_sigma
        self.pot_sample = pot_sample  # the number of POT in photon_distribution
        self.time = []
        self.energy = []
        self.weight = []
        self.norm = 1
        self.sampling_size = sampling_size
        self.supp = brem_suppress  # add phase space suppression
        self.verbose = verbose
        for photon_events in photon_distribution:
            if self.verbose:
                print("getting photons from E =", photon_events[0], "Size =", photon_events[1])
            self._generate_single(photon_events, self.sampling_size)
        # scale MC weights to a physical rate per detector area per second
        normalization = self.epsilon ** 2 * (self.pot_rate / self.pot_sample) \
            / (4 * np.pi * (self.det_dist ** 2) * 24 * 3600) * (meter_by_mev**2)
        self.norm = normalization
        self.weight = [x * self.norm for x in self.weight]
        self.timing = np.array(self.time) * 1e6
        hist, bin_edges = np.histogram(self.energy, bins=nbins, weights=self.weight, density=True)
        super().__init__((bin_edges[:-1] + bin_edges[1:]) / 2, hist, norm=np.sum(self.weight))

    def getScaledWeights(self):
        """Return the weights rescaled back to per-day units (inverse of the area/time factors)."""
        wgt = self.weight
        wgt = [x * self.norm * 24 * 3600 / (meter_by_mev**2) for x in wgt]
        return wgt

    def simulate(self):
        """Regenerate the flux from the stored photon distribution (same steps as __init__)."""
        self.time = []
        self.energy = []
        self.weight = []
        normalization = self.epsilon ** 2 * (self.pot_rate / self.pot_sample) \
            / (4 * np.pi * (self.det_dist ** 2) * 24 * 3600) * (meter_by_mev**2)
        self.norm = normalization
        for photon_events in self.photon_flux:
            if self.verbose:
                print("getting photons from E =", photon_events[0], "Size =", photon_events[1])
            self._generate_single(photon_events, self.sampling_size)
        self.weight = [x * self.norm for x in self.weight]
        self.timing = np.array(self.time) * 1e6
        hist, bin_edges = np.histogram(self.energy, bins=self.nbins, weights=self.weight, density=True)
        super().__init__((bin_edges[:-1] + bin_edges[1:]) / 2, hist, norm=np.sum(self.weight))

    def _generate_single(self, photon_events, nsamples):
        # Initiate photon position, energy and momentum.
        if photon_events[0]**2 < self.dp_m**2:
            return
        dp_m = self.dp_m
        dp_e = photon_events[0]
        dp_p = np.sqrt(dp_e ** 2 - self.dp_m ** 2)
        dp_momentum = np.array([dp_e, 0, 0, dp_p])

        # dark photon to dark matter
        dm_m = self.dm_m
        dm_e = self.dp_m / 2
        dm_p = np.sqrt(dm_e ** 2 - dm_m ** 2)

        # Directional sampling.
        dp_wgt = photon_events[1] / nsamples  # Event weight
        # Brem suppression
        if self.supp == True:
            el_e = 1.0773*dp_e + 13.716  # most likely electron energy that produced this dark photon
            supp_fact = min(1, 1154 * np.exp(-24.42 * np.power(dp_m/el_e, 0.3174)))
            dp_wgt *= supp_fact

        ## optimize
        #pos = np.zeros(3)  ## optimize
        t = np.random.normal(self.pot_mu * 1e-6, self.pot_sigma * 1e-6, nsamples)
        t_dp = np.random.exponential(1e-6 * self.life_time * dp_momentum[0] / dp_m, nsamples)
        t += t_dp
        csd = np.random.uniform(-1, 1, nsamples)
        phid = np.random.uniform(0, 2 * np.pi, nsamples)
        boost_matr = lorentz_matrix(np.array([-dp_momentum[1] / dp_momentum[0],
                                              -dp_momentum[2] / dp_momentum[0],
                                              -dp_momentum[3] / dp_momentum[0]]))
        pos_z = c_light * t_dp * dp_momentum[3] / dp_momentum[0]  # position is along z by construction
        for i in range(nsamples):
            dm_momentum = np.array([dm_e, dm_p * np.sqrt(1 - csd[i] ** 2) * np.cos(phid[i]),
                                    dm_p * np.sqrt(1 - csd[i] ** 2) * np.sin(phid[i]), dm_p * csd[i]])
            dm_momentum = boost_matr @ dm_momentum

            # dark matter arrives at detector, assuming azimuthal symmetric
            # append the time and energy spectrum of the DM.
            # DM particle 1
            v = dm_momentum[1:] / dm_momentum[0] * c_light
            a = v[0]*v[0] + v[1]*v[1] + v[2]*v[2]  #np.sum(v ** 2)
            b = 2*v[2]*pos_z[i]  # dot product is along z by construction
            c = pos_z[i]**2 - self.det_dist ** 2
            if b ** 2 - 4 * a * c >= 0:
                t_dm = (-b - np.sqrt(b ** 2 - 4 * a * c)) / (2 * a)
                if t_dm >= 0:
                    if self.verbose:
                        print("adding weight", dp_wgt)
                    self.time.append(t[i]+t_dm)
                    self.energy.append(dm_momentum[0])
                    self.weight.append(dp_wgt)
                t_dm = (-b + np.sqrt(b ** 2 - 4 * a * c)) / (2 * a)
                if t_dm >= 0:
                    if self.verbose:
                        print("adding weight", dp_wgt)
                    self.time.append(t[i]+t_dm)
                    self.energy.append(dm_momentum[0])
                    self.weight.append(dp_wgt)
            # DM particle 2 (the recoiling partner carries the remaining four-momentum)
            v = (dp_momentum - dm_momentum)[1:] / (dp_momentum - dm_momentum)[0] * c_light
            a = v[0]*v[0] + v[1]*v[1] + v[2]*v[2]  #np.sum(v ** 2)
            # NOTE(review): the duplicated "b = b =" below is a harmless chained
            # assignment (typo) — behavior is identical to a single assignment
            b = b = 2*v[2]*pos_z[i]
            c = pos_z[i]**2 - self.det_dist ** 2
            if b ** 2 - 4 * a * c >= 0:
                t_dm = (-b - np.sqrt(b ** 2 - 4 * a * c)) / (2 * a)
                if t_dm >= 0:
                    if self.verbose:
                        print("adding weight", dp_wgt)
                    self.time.append(t[i]+t_dm)
                    self.energy.append((dp_momentum - dm_momentum)[0])
                    self.weight.append(dp_wgt)
                t_dm = (-b + np.sqrt(b ** 2 - 4 * a * c)) / (2 * a)
                if t_dm >= 0:
                    if self.verbose:
                        print("adding weight", dp_wgt)
                    self.time.append(t[i]+t_dm)
                    self.energy.append((dp_momentum - dm_momentum)[0])
                    self.weight.append(dp_wgt)

    def fint(self, er, m):
        """Integral of flux/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f0)
        return res

    def fint1(self, er, m):
        """Integral of flux*ex/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f1)
        return res

    def fint2(self, er, m):
        """Integral of flux*ex^2/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f2)
        return res

    def f0(self, ev):
        # weight 1/(ev^2 - m_chi^2)
        return 1/(ev**2 - self.dm_m**2)

    def f1(self, ev):
        # weight ev/(ev^2 - m_chi^2)
        return ev/(ev**2 - self.dm_m**2)

    def f2(self, ev):
        # weight ev^2/(ev^2 - m_chi^2)
        return ev**2 / (ev**2 - self.dm_m**2)
class DMFluxFromPiMinusAbsorption:
    r"""
    Dark matter flux from pi^- + p -> A^\prime + n -> \chi + \chi + n
    """
    def __init__(self, dark_photon_mass, coupling_quark, dark_matter_mass, life_time=0.001,
                 detector_distance=19.3, pot_rate=5e20, pot_mu=0.7, pot_sigma=0.15,
                 pion_rate=18324/500000, sampling_size=100000):
        """
        initialize and generate flux
        default values are COHERENT experiment values
        :param dark_photon_mass: dark photon mass
        :param life_time: life time of dark photon in rest frame, unit in micro second
        :param coupling_quark: dark photon coupling to quarks divided by electron charge
        :param dark_matter_mass: mass of dark matter, unit in MeV
        :param detector_distance: distance from the detector to the target
        :param pot_rate: proton on target rate, unit POT/day
        :param pot_mu: mean of guassian distribution of proton on target, unit in micro second
        :param pot_sigma: std of guassian distribution of proton on target, unit in micro second
        :param pion_rate: pi^- production rate
        :param sampling_size: size of sampling dark photons
        """
        self.dp_m = dark_photon_mass
        self.dm_m = dark_matter_mass
        self.epsi_quark = coupling_quark
        self.det_dist = detector_distance / meter_by_mev
        self.life_time = life_time  # input in mus, internal in s
        self.pot_mu = pot_mu        # stored in micro seconds; simulate() converts
        self.pot_sigma = pot_sigma  # stored in micro seconds; simulate() converts
        self.pot_rate = pot_rate
        self.pion_rate = pion_rate
        self.sampling_size = sampling_size
        self.timing = []
        self.energy = []
        self.ed_min = None
        self.ed_max = None
        self.norm = None
        self.simulate()
        self.ev_min = self.ed_min
        self.ev_max = self.ed_max

    def get_lifetime(self, g, m):
        """Dark photon lifetime (converted via mev_per_hz) for coupling g and mass m."""
        return ((16 * np.pi ** 2) / ((g ** 2) * m)) * mev_per_hz

    def simulate(self):
        """
        generate dark matter flux
        """
        # First check that the dp mass is less than the pi- mass.
        if self.dp_m > massofpi:
            self.norm = 0.0
            return
        # two-body kinematics of pi- + p -> n + A' at rest
        dp_m = self.dp_m
        dp_e = ((massofpi + massofp) ** 2 - massofn ** 2 + dp_m ** 2) / (2 * (massofpi + massofp))
        dp_p = np.sqrt(dp_e ** 2 - dp_m ** 2)
        dp_v = dp_p / dp_e
        gamma = dp_e / dp_m
        tau = (self.life_time * 1e-6 * c_light / meter_by_mev) * gamma
        tf = np.random.normal(self.pot_mu * 1e-6 * c_light / meter_by_mev,
                              self.pot_sigma * 1e-6 * c_light / meter_by_mev,
                              self.sampling_size)  # POT
        t = np.random.exponential(tau, self.sampling_size)  # life time of each dark photon
        cs = np.random.uniform(-1, 1, self.sampling_size)  # direction of each dark photon
        # in rest frame
        estar = dp_m / 2
        pstar = np.sqrt(estar ** 2 - self.dm_m ** 2)
        pstarx = pstar * cs
        pstary = pstar * np.sqrt(1 - cs ** 2)
        # boost to lab frame
        elab = gamma * (estar + dp_v * pstarx)
        plabx = gamma * (pstarx + dp_v * estar)
        plaby = pstary
        vx = plabx / elab
        vy = plaby / elab
        timing = []
        energy = []
        for i in range(self.sampling_size):
            # solve |decay point + v*t_dm| = det_dist for the DM arrival time
            a = vx[i] ** 2 + vy[i] ** 2
            b = 2 * vx[i] * t[i] * dp_v
            cc = dp_v ** 2 * t[i] ** 2 - self.det_dist ** 2
            if b ** 2 - 4 * a * cc >= 0:
                if (-b - np.sqrt(b ** 2 - 4 * a * cc)) / (2 * a) > 0:
                    timing.append((-b - np.sqrt(b ** 2 - 4 * a * cc)) / (2 * a) + t[i] + tf[i])
                    energy.append(elab[i])
                if (-b + np.sqrt(b ** 2 - 4 * a * cc)) / (2 * a) > 0:
                    timing.append((-b + np.sqrt(b ** 2 - 4 * a * cc)) / (2 * a) + t[i] + tf[i])
                    energy.append(elab[i])
        self.timing = np.array(timing) / c_light * meter_by_mev * 1e6
        self.energy = np.array(energy)
        self.ed_min = min(energy)
        self.ed_max = max(energy)
        self.ev_min = self.ed_min
        self.ev_max = self.ed_max
        # factor 2: each dark photon yields two DM particles
        self.norm = self.epsi_quark ** 2 * self.pot_rate * self.pion_rate / \
            (4 * np.pi * (self.det_dist ** 2) * 24 * 3600) * \
            self.timing.shape[0] * 2 / self.sampling_size

    def __call__(self, ev):
        """
        dark matter flux, the spectrum is flat because of isotropic
        :param ev: dark matter energy
        :return: dark matter flux
        """
        return 1 / (self.ed_max - self.ed_min) * self.norm if self.ed_min <= ev <= self.ed_max else 0

    def integrate(self, ea, eb, weight_function=None):
        """
        adaptive quadrature can achieve almost linear time on simple weight function,
        no need to do precalculation
        :param ea: lowerbound
        :param eb: upperbound
        :param weight_function: weight function
        :return: integration of the flux, weighted by the weight function
        """
        if eb <= ea:
            return 0
        eb = min(eb, self.ed_max)
        ea = max(ea, self.ed_min)
        if weight_function is None:
            return (eb - ea) / (self.ed_max - self.ed_min) * self.norm
        return quad(weight_function, ea, eb, epsrel=1e-3)[0] / (self.ed_max - self.ed_min) * self.norm

    def change_parameters(self, dark_photon_mass=None, life_time=None, coupling_quark=None,
                          dark_matter_mass=None, detector_distance=None, pot_rate=None,
                          pot_mu=None, pot_sigma=None, pion_rate=None, sampling_size=None):
        """Update any subset of the model parameters and re-run the simulation."""
        self.dp_m = dark_photon_mass if dark_photon_mass is not None else self.dp_m
        # bug fix: previously wrote a pre-converted value to self.dp_life, an attribute
        # that simulate() never reads (it reads self.life_time, in micro seconds) and
        # that __init__ never creates (AttributeError when life_time was None).
        self.life_time = life_time if life_time is not None else self.life_time
        self.epsi_quark = coupling_quark if coupling_quark is not None else self.epsi_quark
        self.dm_m = dark_matter_mass if dark_matter_mass is not None else self.dm_m
        self.det_dist = detector_distance / meter_by_mev if detector_distance is not None else self.det_dist
        self.pot_rate = pot_rate if pot_rate is not None else self.pot_rate
        # bug fix: pot_mu/pot_sigma are stored in micro seconds (see __init__) and
        # simulate() performs the unit conversion; the old code stored converted
        # values here, causing a double conversion on the next simulate().
        self.pot_mu = pot_mu if pot_mu is not None else self.pot_mu
        self.pot_sigma = pot_sigma if pot_sigma is not None else self.pot_sigma
        # bug fix: the pion_rate argument was ignored (self-assignment in both branches).
        self.pion_rate = pion_rate if pion_rate is not None else self.pion_rate
        self.sampling_size = sampling_size if sampling_size is not None else self.sampling_size
        self.simulate()

    def fint(self, er, m):
        """Integral of flux/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f0)
        return res

    def fint1(self, er, m):
        """Integral of flux*ex/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f1)
        return res

    def fint2(self, er, m):
        """Integral of flux*ex^2/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f2)
        return res

    def f0(self, ev):
        # weight 1/(ev^2 - m_chi^2)
        return 1/(ev**2 - self.dm_m**2)

    def f1(self, ev):
        # weight ev/(ev^2 - m_chi^2)
        return ev/(ev**2 - self.dm_m**2)

    def f2(self, ev):
        # weight ev^2/(ev^2 - m_chi^2)
        return ev**2 / (ev**2 - self.dm_m**2)
class DMFluxFromPi0Decay(FluxBaseContinuous):
    """
    Dark matter flux from pi0 -> gamma + A' -> gamma + chi + chi.
    z direction is the direction of the beam
    """
    def __init__(self, pi0_distribution, dark_photon_mass, coupling_quark, dark_matter_mass,
                 meson_mass=massofpi0, life_time=0.001, detector_distance=19.3,
                 detector_direction=0, detector_width=0.1, pot_rate=5e20, pot_mu=0.7,
                 pot_sigma=0.15, pion_rate=52935/500000, nbins=20):
        self.pi0_distribution = pi0_distribution
        self.dp_m = dark_photon_mass
        self.life_time = life_time
        self.epsilon = coupling_quark  # input in mus, internal in s
        self.dm_m = dark_matter_mass
        self.meson_mass = meson_mass
        self.det_dist = detector_distance
        self.det_direc = detector_direction
        self.det_width = detector_width
        self.pot_rate = pot_rate
        self.pot_mu = pot_mu
        self.pot_sigma = pot_sigma
        self.pion_rate = pion_rate
        self.time = []
        self.energy = []
        self.nbins = nbins
        # NOTE(review): duplicate of the self.dm_m assignment above — harmless
        self.dm_m = dark_matter_mass
        for pi0_events in pi0_distribution:
            # must be in the form [azimuth, cos(zenith), kinetic energy]
            self._generate_single(pi0_events)
        self.timing = np.array(self.time)*1e6
        hist, bin_edges = np.histogram(self.energy, bins=nbins, density=True)
        # phase-space suppression factor, zero when the dark photon is heavier than the meson
        ps_factor = np.heaviside(self.meson_mass - self.dp_m, 0.0) * 2 * self.epsilon**2 * (1 - (self.dp_m / self.meson_mass)**2)**3
        super().__init__((bin_edges[:-1]+bin_edges[1:])/2, hist,
                         norm=ps_factor*pot_rate*pion_rate*len(self.time)/len(pi0_distribution)/
                         (2*np.pi*(min(1.0, detector_direction+detector_width/2)-max(-1.0, detector_direction-detector_width/2))*detector_distance**2*24*3600)
                         *(meter_by_mev**2))

    def get_lifetime(self, g, m):
        """Dark photon lifetime (converted via mev_per_hz) for coupling g and mass m."""
        return ((16 * np.pi ** 2) / ((g ** 2) * m)) * mev_per_hz

    def simulate(self):
        """Regenerate the flux from the stored pi0 distribution (same steps as __init__)."""
        self.time = []
        self.energy = []
        for pi0_events in self.pi0_distribution:
            # must be in the form [azimuth, cos(zenith), kinetic energy]
            self._generate_single(pi0_events)
        self.timing = np.array(self.time)*1e6
        hist, bin_edges = np.histogram(self.energy, bins=self.nbins, density=True)
        ps_factor = np.heaviside(self.meson_mass - self.dp_m, 0.0) * 2 * self.epsilon**2 * (1 - (self.dp_m / self.meson_mass)**2)**3
        norm = ps_factor * self.pot_rate * self.pion_rate * \
            len(self.time)/len(self.pi0_distribution)/ \
            (2*np.pi*(min(1.0, self.det_direc+self.det_width/2)-max(-1.0, self.det_direc-self.det_width/2))*self.det_dist**2*24*3600)*(meter_by_mev**2)
        super().__init__((bin_edges[:-1]+bin_edges[1:])/2, hist, norm=norm)

    def _generate_single(self, pi0_events):
        # one MC sample: pi0 flight, decay to dark photon, decay to DM pair,
        # then intersect both DM trajectories with the detector sphere
        if self.dp_m > self.meson_mass:
            return
        pos = np.zeros(3)
        t = 0
        t += np.random.normal(self.pot_mu * 1e-6, self.pot_sigma * 1e-6)
        pi_e = self.meson_mass + pi0_events[2]
        pi_p = np.sqrt(pi_e**2 - self.meson_mass**2)
        pi_v = pi_p / pi_e
        t_pi = np.random.exponential(8.4e-17*pi_e/self.meson_mass)
        pos += pi_v * polar_to_cartesian(pi0_events[:2]) * t_pi * c_light
        t += t_pi
        # pi0 to dark photon
        dp_m = self.dp_m
        dp_e = (self.meson_mass**2 + dp_m**2)/(2*self.meson_mass)
        dp_p = (self.meson_mass**2 - dp_m**2)/(2*self.meson_mass)
        cs = np.random.uniform(-1, 1)
        phi = np.random.uniform(0, 2*np.pi)
        dp_momentum = np.array([dp_e, dp_p*np.sqrt(1-cs**2)*np.cos(phi),
                                dp_p*np.sqrt(1-cs**2)*np.sin(phi), dp_p*cs])
        dp_momentum = lorentz_boost(dp_momentum, -pi_v*polar_to_cartesian(pi0_events[:2]))
        t_dp = np.random.exponential((self.life_time*1e-6)*dp_momentum[0]/dp_m)
        pos += c_light*t_dp*np.array([dp_momentum[1]/dp_momentum[0],
                                      dp_momentum[2]/dp_momentum[0],
                                      dp_momentum[3]/dp_momentum[0]])
        t += t_dp
        # dark photon to dark matter
        dm_m = self.dm_m
        dm_e = dp_m / 2
        dm_p = np.sqrt(dm_e**2 - dm_m**2)
        csd = np.random.uniform(-1, 1)
        phid = np.random.uniform(0, 2*np.pi)
        dm_momentum = np.array([dm_e, dm_p*np.sqrt(1-csd**2)*np.cos(phid),
                                dm_p*np.sqrt(1-csd**2)*np.sin(phid), dm_p*csd])
        dm_momentum = lorentz_boost(dm_momentum, np.array([-dp_momentum[1]/dp_momentum[0],
                                                           -dp_momentum[2]/dp_momentum[0],
                                                           -dp_momentum[3]/dp_momentum[0]]))
        # dark matter arrives at detector, assuming azimuthal symmetric
        v = dm_momentum[1:]/dm_momentum[0]*c_light
        a = np.sum(v**2)
        b = 2*np.sum(v*pos)  #2 * v[2] * (c_light * dp_p / dp_e) * t_dp
        c = np.sum(pos**2) - self.det_dist**2
        if b**2 - 4*a*c >= 0:
            t_dm = (-b+np.sqrt(b**2-4*a*c))/(2*a)
            if t_dm >= 0:  #and self.det_direc-self.det_width/2 <= (pos[2]+v[2]*t_dm)/np.sqrt(np.sum((v*t_dm + pos)**2)) <= self.det_direc+self.det_width/2:
                self.time.append(t+t_dm)
                self.energy.append(dm_momentum[0])
            t_dm = (-b-np.sqrt(b**2-4*a*c))/(2*a)
            if t_dm >= 0:  #and self.det_direc-self.det_width/2 <= (pos[2]+v[2]*t_dm)/np.sqrt(np.sum((v*t_dm + pos)**2)) <= self.det_direc+self.det_width/2:
                self.time.append(t+t_dm)
                self.energy.append(dm_momentum[0])
        # second DM particle: remaining four-momentum of the dark photon
        v = (dp_momentum-dm_momentum)[1:]/(dp_momentum-dm_momentum)[0]*c_light
        a = np.sum(v**2)
        b = 2*np.sum(v*pos)
        c = np.sum(pos**2) - self.det_dist**2
        if b**2 - 4*a*c >= 0:
            t_dm = (-b+np.sqrt(b**2-4*a*c))/(2*a)
            if t_dm >= 0:  #and self.det_direc-self.det_width/2 <= (pos[2]+v[2]*t_dm)/np.sqrt(np.sum((v*t_dm + pos)**2)) <= self.det_direc+self.det_width/2:
                self.time.append(t+t_dm)
                self.energy.append((dp_momentum-dm_momentum)[0])
            t_dm = (-b-np.sqrt(b**2-4*a*c))/(2*a)
            if t_dm >= 0:  #and self.det_direc-self.det_width/2 <= (pos[2]+v[2]*t_dm)/np.sqrt(np.sum((v*t_dm + pos)**2)) <= self.det_direc+self.det_width/2:
                self.time.append(t+t_dm)
                self.energy.append((dp_momentum-dm_momentum)[0])

    def to_pandas(self):
        """Return the sampled (time, energy) pairs as a DataFrame."""
        return pd.DataFrame({'time': self.time, 'energy': self.energy})

    def fint(self, er, m):
        """Integral of flux/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f0)
        return res

    def fint1(self, er, m):
        """Integral of flux*ex/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f1)
        return res

    def fint2(self, er, m):
        """Integral of flux*ex^2/(ex^2 - mx^2) above the kinematic threshold for recoil er."""
        if np.isscalar(m):
            m = np.array([m])
        emin = 0.5 * (np.sqrt((er**2*m+2*er*m**2+2*er*self.dm_m**2+4*m*self.dm_m**2)/m) + er)
        res = np.zeros_like(emin)
        for i in range(emin.shape[0]):
            res[i] = self.integrate(emin[i], self.ev_max, weight_function=self.f2)
        return res

    def f0(self, ev):
        # weight 1/(ev^2 - m_chi^2)
        return 1/(ev**2 - self.dm_m**2)

    def f1(self, ev):
        # weight ev/(ev^2 - m_chi^2)
        return ev/(ev**2 - self.dm_m**2)

    def f2(self, ev):
        # weight ev^2/(ev^2 - m_chi^2)
        return ev**2 / (ev**2 - self.dm_m**2)
48.09542
163
0.53164
9,088
63,005
3.53026
0.06206
0.01945
0.010255
0.009475
0.761712
0.713431
0.684225
0.659415
0.624443
0.6001
0
0.046309
0.330291
63,005
1,309
164
48.132162
0.714042
0.117546
0
0.590864
0
0.000993
0.035546
0.007396
0
0
0
0
0
1
0.072493
false
0.000993
0.004965
0.028798
0.193644
0.00993
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
961ccfb0c6fb46c865492bed7af363f36b450b4b
1,239
py
Python
utils/checks.py
JDJGInc/JDBot
057bcc5c80452c9282606e9bf66219e614aac5e1
[ "MIT" ]
12
2021-01-09T06:17:51.000Z
2022-03-18T06:30:15.000Z
utils/checks.py
JDJGInc/JDBot
057bcc5c80452c9282606e9bf66219e614aac5e1
[ "MIT" ]
21
2021-03-21T16:43:45.000Z
2022-02-01T16:02:26.000Z
utils/checks.py
JDJGInc/JDBot
057bcc5c80452c9282606e9bf66219e614aac5e1
[ "MIT" ]
25
2021-03-21T16:33:56.000Z
2022-03-12T16:52:25.000Z
import discord


def check(ctx):
    """Return a wait_for predicate matching messages from the command author."""
    def inner(m):
        return m.author == ctx.author
    return inner


def Membercheck(ctx):
    """Return a wait_for predicate matching messages sent by the bot itself."""
    def inner(m):
        return m.author == ctx.guild.me
    return inner


def warn_permission(ctx, Member):
    """Whether ctx.author may warn Member.

    In a guild text channel: requires manage_messages, a strictly higher top
    role, and at-least-equal guild permissions. In DMs: always allowed.
    Returns None (falsy) for other channel types, as before.
    """
    if isinstance(ctx.channel, discord.TextChannel):
        # bug with user with same permissions maybe and other stuff
        # (seems fixed for right now, leaving note just in case.)
        return (ctx.author.guild_permissions.manage_messages
                and ctx.author.top_role > Member.top_role
                and ctx.author.guild_permissions >= Member.guild_permissions)
    if isinstance(ctx.channel, discord.DMChannel):
        return True


def cleanup_permission(ctx):
    """Whether ctx.author may run cleanup: manage_messages in guild text
    channels, always allowed in DMs."""
    if isinstance(ctx.channel, discord.TextChannel):
        return ctx.author.guild_permissions.manage_messages
    if isinstance(ctx.channel, discord.DMChannel):
        return True


def mutual_guild_check(ctx, user):
    """True if ctx.author and user share at least one guild."""
    return bool(set(ctx.author.mutual_guilds) & set(user.mutual_guilds))


async def filter_commands(ctx, command_list):
    """Return the commands from command_list that ctx can actually run."""
    async def check(cmd, ctx):
        try:
            return await cmd.can_run(ctx)
        # Fixed: was a bare `except:`, which also swallows KeyboardInterrupt
        # and SystemExit; can_run failures raise CommandError (an Exception).
        except Exception:
            return False
    return [cmd for cmd in command_list if await check(cmd, ctx)]
27.533333
158
0.742534
176
1,239
5.102273
0.357955
0.060134
0.066815
0.097996
0.365256
0.365256
0.365256
0.365256
0.302895
0.18931
0
0.001946
0.170299
1,239
45
159
27.533333
0.871595
0.090395
0
0.333333
0
0
0
0
0
0
0
0
0
1
0.233333
false
0
0.033333
0.066667
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2