hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7abf0dedd7528e5e8c658c425a2c635f12f405fd | 874 | py | Python | flask-api/api/models/rating.py | reciprep/reciprep-dev | 73c9e7084c7797ed7ce51c624f527c52ee3a5d87 | [
"MIT"
] | null | null | null | flask-api/api/models/rating.py | reciprep/reciprep-dev | 73c9e7084c7797ed7ce51c624f527c52ee3a5d87 | [
"MIT"
] | 5 | 2017-03-07T23:20:30.000Z | 2017-03-07T23:22:10.000Z | flask-api/api/models/rating.py | reciprep/reciprep-server | 73c9e7084c7797ed7ce51c624f527c52ee3a5d87 | [
"MIT"
] | null | null | null | import uuid
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.dialects.postgresql import ARRAY
from api import app, db, bcrypt
from api.models.user import User
from api.models.recipe import Recipe
class Rating(db.Model):
__tablename__ = 'ratings'
id = db.Column(UUID(as_uuid=True), primary_key=True, default=lambda: uuid.uuid4().hex)
user_id = db.Column(UUID(as_uuid=True), db.ForeignKey('users.id'))
recipe_id = db.Column(UUID(as_uuid=True), db.ForeignKey('recipes.id'))
value = db.Column(db.Float, nullable=False)
user = db.relationship(User, backref='user_ratings')
recipe = db.relationship(Recipe, backref='rating_users')
def __init__(self, user=None, recipe=None, value=0):
self.user = user
self.recipe = recipe
self.value = value
| 33.615385 | 90 | 0.729977 | 123 | 874 | 5.04878 | 0.382114 | 0.05153 | 0.048309 | 0.067633 | 0.276973 | 0.154589 | 0.154589 | 0.115942 | 0.115942 | 0 | 0 | 0.002706 | 0.154462 | 874 | 25 | 91 | 34.96 | 0.837618 | 0 | 0 | 0 | 0 | 0 | 0.056064 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.368421 | 0 | 0.842105 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
8f81f4cfe87bac50670acf3a61610147f3e9fb75 | 341 | py | Python | eisenmann-backend/product/views/product_entry_view.py | RubenRodrigo/Eisenmann-Inventory | 5abb59a11d43987db3d1742b6f0978c6aa6ee81d | [
"MIT"
] | null | null | null | eisenmann-backend/product/views/product_entry_view.py | RubenRodrigo/Eisenmann-Inventory | 5abb59a11d43987db3d1742b6f0978c6aa6ee81d | [
"MIT"
] | null | null | null | eisenmann-backend/product/views/product_entry_view.py | RubenRodrigo/Eisenmann-Inventory | 5abb59a11d43987db3d1742b6f0978c6aa6ee81d | [
"MIT"
] | null | null | null | # DRF
from rest_framework import viewsets
# Models
from product.models.product_entry import ProductEntry
# Serializers
from product.serializers.product_entry_serializer import ProductEntrySerializer
class ProductEntryViewSet(viewsets.ModelViewSet):
queryset = ProductEntry.objects.all()
serializer_class = ProductEntrySerializer
| 24.357143 | 79 | 0.83871 | 34 | 341 | 8.264706 | 0.558824 | 0.078292 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111437 | 341 | 13 | 80 | 26.230769 | 0.927393 | 0.064516 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
8f93b66dde157acbb6af2fc0a8acb61534446a20 | 2,252 | py | Python | .templates_py/PipelineModule.py | lnilya/sammie | 656b57ecef67923228f866406385b767a7099e3d | [
"BSD-3-Clause"
] | 2 | 2022-02-03T20:03:54.000Z | 2022-02-03T20:14:37.000Z | .templates_py/PipelineModule.py | lnilya/sammie | 656b57ecef67923228f866406385b767a7099e3d | [
"BSD-3-Clause"
] | null | null | null | .templates_py/PipelineModule.py | lnilya/sammie | 656b57ecef67923228f866406385b767a7099e3d | [
"BSD-3-Clause"
] | null | null | null | from src.sammie.py.modules.ModuleBase import ModuleBase
class __NAME__Keys:
"""Convenience class to access the keys as named entities rather than in an array"""
inSomeInputKey: str
outSomeOutputKey: str
def __init__(self, inputs, outputs):
self.inSomeInputKey = inputs[0]
self.outSomeOutputKey = outputs[0]
class __NAME__(ModuleBase):
keys: __NAME__Keys
def __init__(self,*args,**kwargs):
super().__init__(*args,**kwargs)
self.log = '__NAME__'
self.trace('initialized')
def unpackParams(self,paramName1,paramName2,**other):
"""unpack and possibly parse/cast all parameters coming from JS. The parameters from JS are defined in the params.tsx file of the respective step.
The arrive as a dictionary on the py side and sometimes need some parsing. In any way this function provides a simple method to extract
these parameters as named variables rather than using params['paramName1'] you can run it through this function."""
#
#respective
return paramName1[0],paramName2
def run(self, action, params, inputkeys,outputkeys):
self.keys = __NAME__Keys(inputkeys, outputkeys)
#This is a stub and simply displays best practices on how to structure this function. Feel free to change it
if action == 'apply':
#Parse Parameters out of the dictionary arriving from JS
param1, param2 = self.unpackParams(**params)
#get the input that this step is working on
someInput = self.session.getData(self.keys.inPreprocessedImg)
#do something with it...
#Required: Notify the pipeline that the processed data is now available, so that the user can step to the next step
#of the UI.
self.onGeneratedData(self.keys.outBorderedImage, someInput, params)
#Generate an output that will go to javascript for displaying on the UI side
return {'demoResult':'Somethign for JS'}
def exportData(self, key: str, path: str, **args):
#Get the data that needs to be exported
data = self.session.getData(key)
#Write a file with this data or postprocess it in some way
#...
| 39.508772 | 154 | 0.674067 | 294 | 2,252 | 5.054422 | 0.496599 | 0.016151 | 0.014805 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005942 | 0.252664 | 2,252 | 56 | 155 | 40.214286 | 0.877005 | 0.446714 | 0 | 0 | 0 | 0 | 0.041425 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.208333 | false | 0 | 0.041667 | 0 | 0.541667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
8f95bfd1b9620b0dbb4a2ac282872ee79ea87f6c | 4,317 | py | Python | xii/assembler/ufl_utils.py | MiroK/fenics_ii | 58c41f0e8dba720962830395851e081b057269cc | [
"MIT"
] | 10 | 2017-06-22T21:05:17.000Z | 2020-09-25T08:36:59.000Z | xii/assembler/ufl_utils.py | MiroK/fenics_ii | 58c41f0e8dba720962830395851e081b057269cc | [
"MIT"
] | 2 | 2018-04-14T08:43:59.000Z | 2018-09-19T14:51:46.000Z | xii/assembler/ufl_utils.py | MiroK/fenics_ii | 58c41f0e8dba720962830395851e081b057269cc | [
"MIT"
] | 6 | 2018-04-13T20:33:53.000Z | 2020-09-25T08:37:01.000Z | from dolfin.function.argument import Argument
from ufl.core.terminal import Terminal
import dolfin as df
def topological_dim(thing):
'''Extract topological dimension of thing's cell'''
domain = thing.ufl_domain() # None with e.g. Constants
return -1 if domain is None else domain.ufl_cell().topological_dimension()
def geometric_dim(thing):
'''Extract geoemtric dimension of thing's cell'''
domain = thing.ufl_domain()
return -1 if domain is None else domain.ufl_cell().geometric_dimension()
def is_terminal(o):
'''Is o a terminal type'''
return isinstance(o, Terminal)
def traverse(expr):
'''Traverse the UFL expression tree'''
if expr.ufl_operands:
for op in expr.ufl_operands:
for e in traverse(op):
yield e
yield expr
def traverse_terminals(expr):
'''
Yield all the termnals (can be duplicate) in the UFL expression tree
'''
return filter(is_terminal, traverse(expr))
def traverse_subexpr(expr):
'''Yield nodes of the UFL expression tree that have arguments'''
return filter(lambda e: not is_terminal(e), traverse(expr))
def is_equal_terminal(this, that, attributes=None):
'''
Trace introduces attributes to object. So comparison of terminals
must take this into account
'''
if is_terminal(this) and is_terminal(that):
ufl_equal = this == that
# For no attributes we default to standard ufl comparison
if not ufl_equal or attributes is None:
return ufl_equal
# All atributes are there and objects agree on their value
if all(hasattr(this, a) for a in attributes) and all(hasattr(that, a) for a in attributes):
return all(getattr(this, a) == getattr(that, a) for a in attributes)
return False
return False
def matches(expr, target, attributes=None):
'''Compare two UFL expression for equalty'''
# NOTE: this is 99.9% duplecate for ufl functionalty
# Terminal are the same if ==
if is_terminal(expr) and is_terminal(target):
return is_equal_terminal(expr, target, attributes)
# Not terminal need to agree on type and have the same argument
if not is_terminal(expr) and not is_terminal(target):
return all((isinstance(expr, type(target)),
all(matches(*ops, attributes=None)
for ops in zip(expr.ufl_operands, target.ufl_operands))))
return False
def contains(expr, target, attributes=None):
'''Is the target expression contained in the expression?'''
# A tarminal target either agrees or is one of the expr terminals
if is_terminal(target):
if is_terminal(expr):
return is_equal_terminal(expr, target, attributes)
else:
return any(matches(target, t, attributes) for t in traverse_terminals(expr))
# Target is a expression
if is_terminal(expr): return False
# The nodes need to match
ttarget = type(target)
for sexpr in traverse_subexpr(expr):
if matches(sexpr, target, attributes):
return True
return False
def replace(expr, arg, replacement, attributes=None):
'''Replace and argument in the expression by the replacement'''
# Do nothing if no way to substitute, i.e. return original
if not contains(expr, arg, attributes):
return expr
# Identical
if matches(expr, arg, attributes):
return replacement
# Reconstruct the node with the substituted argument
return type(expr)(*[replace(op, arg, replacement, attributes) for op in expr.ufl_operands])
def is_trial_function(v):
'''Is v a trial function'''
return isinstance(v, Argument) and v.number() == 1
def is_test_function(v):
'''Is v a test function'''
return isinstance(v, Argument) and v.number() == 0
def form_arity(form):
'''How many args in the form'''
return len(form.arguments())
# FIXME: fun problem is it linear?
def reconstruct(f):
'''Make a copy of terminal object'''
assert is_terminal(f)
if is_trial_function(f):
return df.TrialFunction(f.function_space())
if is_test_function(f):
return df.TestFunction(f.function_space())
# Others?
return f
| 31.057554 | 99 | 0.660644 | 590 | 4,317 | 4.749153 | 0.261017 | 0.046395 | 0.021413 | 0.021413 | 0.180228 | 0.15132 | 0.135617 | 0.087081 | 0.056388 | 0.027123 | 0 | 0.002163 | 0.250405 | 4,317 | 138 | 100 | 31.282609 | 0.863721 | 0.270558 | 0 | 0.115942 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007246 | 0.014493 | 1 | 0.202899 | false | 0 | 0.043478 | 0 | 0.608696 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
8fad3780b5ebc9508bdfc9f62b8013265d83efd2 | 197 | py | Python | admins/urls.py | Sult/evehub | 61872390d25dd089958f6b4bbc7677f8dd137297 | [
"MIT"
] | null | null | null | admins/urls.py | Sult/evehub | 61872390d25dd089958f6b4bbc7677f8dd137297 | [
"MIT"
] | null | null | null | admins/urls.py | Sult/evehub | 61872390d25dd089958f6b4bbc7677f8dd137297 | [
"MIT"
] | null | null | null | from django.conf.urls import patterns, url
from admins import views
urlpatterns = patterns(
'',
# Control panels
url(r'^admin/overview/$', views.overview, name='admin_overview'),
)
| 16.416667 | 69 | 0.690355 | 24 | 197 | 5.625 | 0.666667 | 0.192593 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.182741 | 197 | 11 | 70 | 17.909091 | 0.838509 | 0.071066 | 0 | 0 | 0 | 0 | 0.171271 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
8faeb13d8f905c19361af335e0eb34cc0470d735 | 648 | py | Python | minder_utils/configurations/__init__.py | minder-utils/minder_utils_light | d0bd7f7279d20b51a6f73aa55f885c04b31dd9f4 | [
"MIT"
] | null | null | null | minder_utils/configurations/__init__.py | minder-utils/minder_utils_light | d0bd7f7279d20b51a6f73aa55f885c04b31dd9f4 | [
"MIT"
] | null | null | null | minder_utils/configurations/__init__.py | minder-utils/minder_utils_light | d0bd7f7279d20b51a6f73aa55f885c04b31dd9f4 | [
"MIT"
] | null | null | null | import os
from pathlib import Path
from minder_utils.util.util import reformat_path
import yaml
p = Path(os.path.join(os.path.dirname(__file__), 'confidential'))
if not p.exists():
os.mkdir(reformat_path(p))
data_path = os.path.join(os.path.dirname(__file__), 'confidential', 'data_path.txt')
token_path = os.path.join(os.path.dirname(__file__), 'confidential', 'token_real.json')
dates_path = os.path.join(os.path.dirname(__file__), 'confidential', 'dates.json')
delta_path = os.path.join(os.path.dirname(__file__), 'confidential', 'delta.txt')
tihm_data_path = os.path.join(os.path.dirname(__file__), 'confidential', 'tihm_data_path.txt')
| 40.5 | 94 | 0.757716 | 100 | 648 | 4.54 | 0.27 | 0.15859 | 0.132159 | 0.185022 | 0.585903 | 0.585903 | 0.585903 | 0.585903 | 0.585903 | 0.207048 | 0 | 0 | 0.080247 | 648 | 15 | 95 | 43.2 | 0.761745 | 0 | 0 | 0 | 0 | 0 | 0.211747 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
8fbcc4c5e6588cf9b8d93e9638b85b3ec56bfe21 | 1,067 | py | Python | lab01/tiago-dalloca.py | Desnord/lab-mc102 | 470e5d942cf57305d8ba0b272f4c5f9aad4de11f | [
"Apache-2.0"
] | 7 | 2018-03-30T21:08:14.000Z | 2018-08-23T01:10:27.000Z | lab01/tiago-dalloca.py | Desnord/lab-mc102 | 470e5d942cf57305d8ba0b272f4c5f9aad4de11f | [
"Apache-2.0"
] | 2 | 2018-03-30T23:24:41.000Z | 2018-05-15T00:46:12.000Z | lab01/tiago-dalloca.py | Desnord/lab-mc102 | 470e5d942cf57305d8ba0b272f4c5f9aad4de11f | [
"Apache-2.0"
] | 7 | 2018-03-30T21:13:46.000Z | 2018-09-23T17:12:33.000Z | # DESCRIÇÃO
# Escreva um programa que calcule a circunferência C de um determinado
# planeta, com base na observação do ângulo A, entre duas localidades C1 e
# C2, e na distância D, em estádios, entre elas.
# Suponha que as localidades estejam no mesmo meridiano de um planeta
# esférico. O seu programa deverá imprimir a circunferência do planeta em
# estádios e em quilômetros.
# ENTRADA
# A entrada do programa será composta da distância D, em estádios, e do
# ângulo A, em graus, respectivamente, um número em cada linha.
# SAÍDA
# A saída mostra a circunferência Ce, em estádios, e Ckm, em quilômetros,
# do planeta seguindo o cálculo feito por Eratóstenes para a Terra com uma
# casa decimal de precisão.
def read_float():
return float(input())
# converte e para km
def e_to_km(e):
return 0.1764 * e
# printa floats com 1 digito depois da vírgula
def print_1f(f):
print("%.1f" % f)
D = read_float()
A = read_float()
# 360/A = o número de vezes que a distância D
# ocorre no planeta
E = D * (360 / A)
print_1f(E)
print_1f(e_to_km(E))
| 23.195652 | 74 | 0.725398 | 180 | 1,067 | 4.244444 | 0.483333 | 0.052356 | 0.043194 | 0.052356 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021127 | 0.2015 | 1,067 | 45 | 75 | 23.711111 | 0.875587 | 0.757263 | 0 | 0 | 0 | 0 | 0.016667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0 | 0.181818 | 0.454545 | 0.363636 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
8fc46a0cea8d599b2e3cb170bc4da8cc1384b79d | 461 | py | Python | sphinxcontrib/needs/services/base.py | twodrops/sphinxcontrib-needs | c2081bd499fe7d36840d29ca3fb05c80e7bf2284 | [
"MIT"
] | null | null | null | sphinxcontrib/needs/services/base.py | twodrops/sphinxcontrib-needs | c2081bd499fe7d36840d29ca3fb05c80e7bf2284 | [
"MIT"
] | null | null | null | sphinxcontrib/needs/services/base.py | twodrops/sphinxcontrib-needs | c2081bd499fe7d36840d29ca3fb05c80e7bf2284 | [
"MIT"
] | null | null | null | import sphinx
from pkg_resources import parse_version
sphinx_version = sphinx.__version__
if parse_version(sphinx_version) >= parse_version("1.6"):
from sphinx.util import logging
else:
import logging
logging.basicConfig()
class BaseService:
def __init__(self, *args, **kwargs):
self.log = logging.getLogger(__name__)
def request(self, *args, **kwargs):
raise NotImplementedError('Must be implemented by the service!')
| 23.05 | 72 | 0.724512 | 56 | 461 | 5.642857 | 0.589286 | 0.113924 | 0.189873 | 0.158228 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005291 | 0.180043 | 461 | 19 | 73 | 24.263158 | 0.830688 | 0 | 0 | 0 | 0 | 0 | 0.08243 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153846 | false | 0 | 0.307692 | 0 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
8fd1a869a307cb963df420776582253aee8db365 | 606 | py | Python | pytglib/api/types/rich_texts.py | iTeam-co/pytglib | e5e75e0a85f89b77762209b32a61b0a883c0ae61 | [
"MIT"
] | 6 | 2019-10-30T08:57:27.000Z | 2021-02-08T14:17:43.000Z | pytglib/api/types/rich_texts.py | iTeam-co/python-telegram | e5e75e0a85f89b77762209b32a61b0a883c0ae61 | [
"MIT"
] | 1 | 2021-08-19T05:44:10.000Z | 2021-08-19T07:14:56.000Z | pytglib/api/types/rich_texts.py | iTeam-co/python-telegram | e5e75e0a85f89b77762209b32a61b0a883c0ae61 | [
"MIT"
] | 5 | 2019-12-04T05:30:39.000Z | 2021-05-21T18:23:32.000Z |
from ..utils import Object
class RichTexts(Object):
"""
A concatenation of rich texts
Attributes:
ID (:obj:`str`): ``RichTexts``
Args:
texts (List of :class:`telegram.api.types.RichText`):
Texts
Returns:
RichText
Raises:
:class:`telegram.Error`
"""
ID = "richTexts"
def __init__(self, texts, **kwargs):
self.texts = texts # list of RichText
@staticmethod
def read(q: dict, *args) -> "RichTexts":
texts = [Object.read(i) for i in q.get('texts', [])]
return RichTexts(texts)
| 18.363636 | 61 | 0.551155 | 66 | 606 | 5 | 0.560606 | 0.054545 | 0.066667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.315182 | 606 | 32 | 62 | 18.9375 | 0.795181 | 0.387789 | 0 | 0 | 0 | 0 | 0.072555 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
8ff3bce5f3310585f9e33a3098800502c9b89141 | 868 | py | Python | astruct/type_hints/extras.py | misterfifths/nis_mods | 9d460414cb88d10f2737e9be90babe85c9856001 | [
"MIT"
] | 1 | 2021-10-18T13:42:09.000Z | 2021-10-18T13:42:09.000Z | astruct/type_hints/extras.py | misterfifths/nis-mods | 9d460414cb88d10f2737e9be90babe85c9856001 | [
"MIT"
] | null | null | null | astruct/type_hints/extras.py | misterfifths/nis-mods | 9d460414cb88d10f2737e9be90babe85c9856001 | [
"MIT"
] | null | null | null | import ctypes as C
from typing import TYPE_CHECKING, Sequence, Union
from .ctypes_aliases import AnyCType
if TYPE_CHECKING:
import mmap
"""
TODO: Better WriteableBuffer type? Feels like a MutableSequence[int] should be
fine, but there's some weirdness between the types struct.unpack_from and
ctypes.Structure.from_buffer want.
"""
# Intended to represent any type that can be passed to a ctypes from_buffer
# method, but there are some issues in the stubs that make that hard.
WriteableBuffer = Union[bytearray, memoryview, 'mmap.mmap']
# A single element of the _fields_ attribute of a ctypes.Structure or Union.
CStructureField = Union[tuple[str, type[AnyCType]], tuple[str, type[AnyCType], int]]
# The entire _fields_ attribute of a ctypes.Structure or Union.
CStructureFields = Sequence[CStructureField]
CStructureOrUnion = Union[C.Structure, C.Union]
| 33.384615 | 84 | 0.786866 | 126 | 868 | 5.34127 | 0.531746 | 0.066865 | 0.05052 | 0.053492 | 0.118871 | 0.118871 | 0.118871 | 0.118871 | 0 | 0 | 0 | 0 | 0.141705 | 868 | 25 | 85 | 34.72 | 0.903356 | 0.320277 | 0 | 0 | 0 | 0 | 0.023077 | 0 | 0 | 0 | 0 | 0.04 | 0 | 1 | 0 | false | 0 | 0.444444 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
8ffd6bcc2115d84d5eb85d847a161fe75fb0b230 | 108 | py | Python | examples/helloworld2.py | lam2mo/FPTuner | 29898c96741db341716a20ebfc4d12138cac2c99 | [
"MIT"
] | 14 | 2016-10-29T18:45:03.000Z | 2021-02-12T01:46:54.000Z | examples/helloworld2.py | lam2mo/FPTuner | 29898c96741db341716a20ebfc4d12138cac2c99 | [
"MIT"
] | 4 | 2017-09-12T13:24:54.000Z | 2021-02-19T03:21:18.000Z | examples/helloworld2.py | lam2mo/FPTuner | 29898c96741db341716a20ebfc4d12138cac2c99 | [
"MIT"
] | 4 | 2017-07-19T21:46:54.000Z | 2020-05-21T20:34:41.000Z |
import tft_ir_api as IR
A = IR.RealVE("A", 0, 0.0, 100.0)
rel = IR.BE("+", 4, A, A)
IR.TuneExpr(rel)
| 12 | 33 | 0.555556 | 24 | 108 | 2.416667 | 0.541667 | 0.103448 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.095238 | 0.222222 | 108 | 8 | 34 | 13.5 | 0.595238 | 0 | 0 | 0 | 0 | 0 | 0.018868 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8907501c9b28be9a6d3ff576ef6b0896b6546e5f | 470 | py | Python | geohash/models.py | sewald101/geohash_root | 64f56c21f4a0892e6b3bdc332140988363a5fab8 | [
"Unlicense"
] | 1 | 2020-12-10T23:23:47.000Z | 2020-12-10T23:23:47.000Z | geohash/models.py | sewald101/geohash_root | 64f56c21f4a0892e6b3bdc332140988363a5fab8 | [
"Unlicense"
] | 1 | 2021-06-04T23:33:06.000Z | 2021-06-04T23:33:06.000Z | geohash/models.py | sewald101/geohash_root | 64f56c21f4a0892e6b3bdc332140988363a5fab8 | [
"Unlicense"
] | null | null | null |
import datetime
from django.db import models
from django.utils import timezone
# Create your models here.
class Woeids(models.Model):
country = models.CharField(max_length=200, null=True, blank=True)
name = models.CharField(max_length=200, null=True, blank=True)
woeid = models.IntegerField(default=0, null=True, blank=True)
def __str__(self):
return '%s %s %s' % (
self.country,
self.name,
str(self.woeid)
)
| 23.5 | 69 | 0.66383 | 63 | 470 | 4.857143 | 0.507937 | 0.078431 | 0.127451 | 0.166667 | 0.287582 | 0.287582 | 0.287582 | 0.287582 | 0.287582 | 0 | 0 | 0.019178 | 0.223404 | 470 | 19 | 70 | 24.736842 | 0.819178 | 0.051064 | 0 | 0 | 0 | 0 | 0.0181 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.230769 | 0.076923 | 0.692308 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
890fa29094f2698ba0162646dededd0938cbb192 | 202 | py | Python | data_collection/gazette/spiders/sc_sao_domingos.py | kaiocp/querido-diario | 86004049c6eee305e13066cf3607d30849bb099a | [
"MIT"
] | 454 | 2018-04-07T03:32:57.000Z | 2020-08-17T19:56:22.000Z | data_collection/gazette/spiders/sc_sao_domingos.py | kaiocp/querido-diario | 86004049c6eee305e13066cf3607d30849bb099a | [
"MIT"
] | 254 | 2020-08-18T14:09:43.000Z | 2022-03-28T11:30:51.000Z | data_collection/gazette/spiders/sc_sao_domingos.py | kaiocp/querido-diario | 86004049c6eee305e13066cf3607d30849bb099a | [
"MIT"
] | 183 | 2018-04-11T15:09:37.000Z | 2020-08-15T18:55:11.000Z | from gazette.spiders.base.fecam import FecamGazetteSpider
class ScSaoDomingosSpider(FecamGazetteSpider):
name = "sc_sao_domingos"
FECAM_QUERY = "cod_entidade:244"
TERRITORY_ID = "4216107"
| 25.25 | 57 | 0.777228 | 22 | 202 | 6.909091 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057803 | 0.143564 | 202 | 7 | 58 | 28.857143 | 0.820809 | 0 | 0 | 0 | 0 | 0 | 0.188119 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
89102e8ca848b6e2650eb69463ed0f3738b0b58d | 520 | py | Python | src/messages.py | K-Paul-Acct/wChanger | 70643b52a7305f14cf1ebc9aeff089273edee8c5 | [
"MIT"
] | 2 | 2021-04-06T10:03:57.000Z | 2022-03-22T07:39:25.000Z | src/messages.py | K-Paul-Acct/wChanger | 70643b52a7305f14cf1ebc9aeff089273edee8c5 | [
"MIT"
] | 1 | 2021-11-13T00:14:14.000Z | 2021-11-13T00:14:14.000Z | src/messages.py | K-Paul-Acct/wChanger | 70643b52a7305f14cf1ebc9aeff089273edee8c5 | [
"MIT"
] | null | null | null | bot_is_not_admin = 'Ой! Для того, чтобы бот мог менять название беседы, нужно сделать его админом.'
setting = 'Для настройки смены названия для вашей беседы введите, разделяя вертикальной ' \
'чертой "@wchanger Название чётной недели | Название нечётной недели"'
success = 'Бот успешно настроен и теперь будет менять название беседы!'
greeting = 'Привет! ' + setting + ' Не забудь сделать бота админом!'
user_is_not_admin = 'Извини, настроить бота могут только админы 😕'
error = 'Упс! Команда не распознана 😕'
| 65 | 99 | 0.751923 | 71 | 520 | 5.450704 | 0.732394 | 0.02584 | 0.05168 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169231 | 520 | 7 | 100 | 74.285714 | 0.891204 | 0 | 0 | 0 | 0 | 0 | 0.757692 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
64e3db1ac6b37142f68fda74466fe0e7eed1abc3 | 304 | py | Python | scripts/remove_empty_models.py | emanjavacas/casket | 3edf2ec41c725adbb9a66532fb56a53a7e457f94 | [
"MIT"
] | null | null | null | scripts/remove_empty_models.py | emanjavacas/casket | 3edf2ec41c725adbb9a66532fb56a53a7e457f94 | [
"MIT"
] | null | null | null | scripts/remove_empty_models.py | emanjavacas/casket | 3edf2ec41c725adbb9a66532fb56a53a7e457f94 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import argparse
def remove_where(field, match_fn):
def transform(element):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Remove empty models from a db')
parser.add_argument('db-path')
args = parser.parse_args()
| 17.882353 | 52 | 0.674342 | 38 | 304 | 5.078947 | 0.815789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.213816 | 304 | 16 | 53 | 19 | 0.807531 | 0.065789 | 0 | 0 | 0 | 0 | 0.156028 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0.111111 | 0.111111 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
64e3e47a28caa9481770670a833516f65a1d1add | 717 | py | Python | backend/apps/users/filters.py | playonefor/turiy | e8065e0f6f8b3ade048375332b767a889a19c1df | [
"MIT"
] | null | null | null | backend/apps/users/filters.py | playonefor/turiy | e8065e0f6f8b3ade048375332b767a889a19c1df | [
"MIT"
] | null | null | null | backend/apps/users/filters.py | playonefor/turiy | e8065e0f6f8b3ade048375332b767a889a19c1df | [
"MIT"
] | null | null | null | from django_filters import rest_framework as filters
from django.db.models import Q
from django.contrib.auth import get_user_model
from users.models import tGroup
User = get_user_model()
class UsersFilter(filters.FilterSet):
'''
用户过滤
'''
username = filters.CharFilter(lookup_expr='icontains')
mobile = filters.CharFilter(lookup_expr='icontains')
email = filters.CharFilter(lookup_expr='icontains')
class Meta:
model = User
fields = ('username', 'mobile', 'email', 'is_active')
class tGroupFilter(filters.FilterSet):
'''
用户组过滤
'''
name = filters.CharFilter(lookup_expr='icontains')
class Meta:
model = tGroup
fields = ('name',)
| 21.088235 | 61 | 0.67643 | 81 | 717 | 5.851852 | 0.432099 | 0.14346 | 0.194093 | 0.227848 | 0.362869 | 0.21097 | 0.21097 | 0.21097 | 0 | 0 | 0 | 0 | 0.211994 | 717 | 33 | 62 | 21.727273 | 0.838938 | 0.013947 | 0 | 0.117647 | 0 | 0 | 0.100592 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.235294 | 0 | 0.705882 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
64f0e961423c1d65186320fe453b1e81ade39eac | 204 | py | Python | libs/request/header_const.py | jumper2014/http-api-test-framework-python-pytest | 0cd12def0aa36e643df7741340f7ffade28c4329 | [
"Apache-2.0"
] | null | null | null | libs/request/header_const.py | jumper2014/http-api-test-framework-python-pytest | 0cd12def0aa36e643df7741340f7ffade28c4329 | [
"Apache-2.0"
] | null | null | null | libs/request/header_const.py | jumper2014/http-api-test-framework-python-pytest | 0cd12def0aa36e643df7741340f7ffade28c4329 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# coding=utf-8
# author: zengyuetian
content_type_json = {'Content-Type': 'application/json'}
accept_type_json = {'Accept': 'application/json'}
if __name__ == "__main__":
pass | 20.4 | 56 | 0.705882 | 26 | 204 | 5.076923 | 0.692308 | 0.166667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005618 | 0.127451 | 204 | 10 | 57 | 20.4 | 0.735955 | 0.259804 | 0 | 0 | 0 | 0 | 0.389262 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.25 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
64f763d77c10eb46452117b6282cf68007248866 | 200 | py | Python | pysnake/__main__.py | otov4its/pysnake | dbfe36e464b660cddb4193a493fc0e37e55a251f | [
"MIT"
] | null | null | null | pysnake/__main__.py | otov4its/pysnake | dbfe36e464b660cddb4193a493fc0e37e55a251f | [
"MIT"
] | null | null | null | pysnake/__main__.py | otov4its/pysnake | dbfe36e464b660cddb4193a493fc0e37e55a251f | [
"MIT"
] | null | null | null | import curses
from .game import Game
def go(stdscr):
Game(stdscr).run() # Start game
def main():
# Curses convinient wrapper
curses.wrapper(go)
if __name__ == '__main__':
main() | 13.333333 | 36 | 0.65 | 26 | 200 | 4.692308 | 0.538462 | 0.114754 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.23 | 200 | 15 | 37 | 13.333333 | 0.792208 | 0.18 | 0 | 0 | 0 | 0 | 0.049383 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f0275d3ff8e79732e2092d8d35642359defd84e | 479 | py | Python | utils/__init__.py | CSuppan/two-shot-brdf-shape | 352201b66bfa5cd5e25111451a6583a3e7d499f0 | [
"BSD-Source-Code"
] | null | null | null | utils/__init__.py | CSuppan/two-shot-brdf-shape | 352201b66bfa5cd5e25111451a6583a3e7d499f0 | [
"BSD-Source-Code"
] | null | null | null | utils/__init__.py | CSuppan/two-shot-brdf-shape | 352201b66bfa5cd5e25111451a6583a3e7d499f0 | [
"BSD-Source-Code"
] | null | null | null | # -----------------------------------------------------------------------
# Copyright (c) 2020, NVIDIA Corporation. All rights reserved.
#
# This work is made available
# under the Nvidia Source Code License (1-way Commercial).
#
# Official Implementation of the CVPR2020 Paper
# Two-shot Spatially-varying BRDF and Shape Estimation
# Mark Boss, Varun Jampani, Kihwan Kim, Hendrik P. A. Lensch, Jan Kautz
# -----------------------------------------------------------------------
| 43.545455 | 73 | 0.528184 | 48 | 479 | 5.270833 | 0.958333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021327 | 0.118998 | 479 | 10 | 74 | 47.9 | 0.578199 | 0.956159 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f0d306d518d0d86a40d7ee992fbad6f04fe875f | 243 | py | Python | vocoder/hifigan/env.py | 10088/MockingBird | 6a793cea8488ad40fcad6ab30f9d82bc920ac114 | [
"MIT"
] | 1 | 2021-12-08T16:33:17.000Z | 2021-12-08T16:33:17.000Z | vocoder/hifigan/env.py | 10088/MockingBird | 6a793cea8488ad40fcad6ab30f9d82bc920ac114 | [
"MIT"
] | null | null | null | vocoder/hifigan/env.py | 10088/MockingBird | 6a793cea8488ad40fcad6ab30f9d82bc920ac114 | [
"MIT"
] | null | null | null | import os
import shutil
def build_env(config, config_name, path):
t_path = os.path.join(path, config_name)
if config != t_path:
os.makedirs(path, exist_ok=True)
shutil.copyfile(config, os.path.join(path, config_name))
| 27 | 64 | 0.695473 | 38 | 243 | 4.263158 | 0.447368 | 0.185185 | 0.08642 | 0.17284 | 0.296296 | 0.296296 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1893 | 243 | 8 | 65 | 30.375 | 0.822335 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.285714 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f10ee40e95cf26a981af25a14d379eb64be7e96 | 540 | py | Python | app/routers/openstack/sizes.py | skyworkflows/swm-cloud-gate | 5cae6bc62b5abeb6c985b0b1d1b7d4f7484416af | [
"BSD-3-Clause"
] | 3 | 2021-04-05T18:32:27.000Z | 2021-04-06T08:14:22.000Z | app/routers/openstack/sizes.py | skyworkflows/swm-cloud-gate | 5cae6bc62b5abeb6c985b0b1d1b7d4f7484416af | [
"BSD-3-Clause"
] | null | null | null | app/routers/openstack/sizes.py | skyworkflows/swm-cloud-gate | 5cae6bc62b5abeb6c985b0b1d1b7d4f7484416af | [
"BSD-3-Clause"
] | null | null | null | import typing
from fastapi import APIRouter, Header
from .connector import OpenStackConnector
from .models import convert_to_flavor
CONNECTOR = OpenStackConnector()
ROUTER = APIRouter()
@ROUTER.get("/openstack/flavors")
async def list_flavors(username: str = Header(None), password: str = Header(None)):
CONNECTOR.reinitialize(username, password, "compute")
flavor_list: typing.List[ImageInfo] = []
for item in CONNECTOR.list_sizes():
flavor_list.append(convert_to_flavor(item))
return {"flavors": flavor_list}
| 28.421053 | 83 | 0.753704 | 64 | 540 | 6.21875 | 0.5 | 0.075377 | 0.075377 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.140741 | 540 | 18 | 84 | 30 | 0.857759 | 0 | 0 | 0 | 0 | 0 | 0.059259 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.153846 | 0.307692 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 |
8f12996b37d2624cf9dc2205e4c9a3d5e5086701 | 831 | py | Python | tests/test_ptkcmd/myptkcmd.py | mmiguel6288code/ptkcmd | 825a88d083b5a6114b054421f4067d37b889606f | [
"MIT"
] | 8 | 2020-09-02T21:20:33.000Z | 2022-02-03T19:31:25.000Z | tests/test_ptkcmd/myptkcmd.py | mmiguel6288code/ptkcmd | 825a88d083b5a6114b054421f4067d37b889606f | [
"MIT"
] | null | null | null | tests/test_ptkcmd/myptkcmd.py | mmiguel6288code/ptkcmd | 825a88d083b5a6114b054421f4067d37b889606f | [
"MIT"
] | 1 | 2020-11-12T06:32:48.000Z | 2020-11-12T06:32:48.000Z | from ptkcmd import PtkCmd, Completion, complete_files
class MyPtkCmd(PtkCmd):
prompt='MyPtkCmd$ '
def __init__(self,stdin=None,stdout=None,intro=None,interactive=True,do_complete_cmd=True,default_shell=False,**psession_kwargs):
super().__init__(stdin,stdout,intro,interactive,do_complete_cmd,default_shell,**psession_kwargs)
def do_mycmd(self,*args):
"""
This command is documented.
"""
self.stdout.write('Args were %s\n' % repr(args))
def do_myundoc(self,*args):
self.stdout.write('Args were %s\n' % repr(args))
def help_mytopic(self):
self.stdout.write('You called help for mytopic\n')
def complete_mycmd(self,prev_args,curr_arg,document,complete_event):
yield from complete_files(curr_arg)
def test_ptkcmd():
MyPtkCmd().cmdloop()
| 33.24 | 133 | 0.696751 | 112 | 831 | 4.928571 | 0.455357 | 0.054348 | 0.081522 | 0.068841 | 0.130435 | 0.130435 | 0.130435 | 0.130435 | 0.130435 | 0.130435 | 0 | 0 | 0.175692 | 831 | 24 | 134 | 34.625 | 0.805839 | 0.032491 | 0 | 0.133333 | 0 | 0 | 0.086118 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0.066667 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
8f1535cce7276091ce2be0169366ccc905627a69 | 7,925 | py | Python | Beginner/03. Python/ZodiacSignTraits.py | DipadityaDas/Hacktoberfest | 0e403775b9f1daaccae5d13084f91e4c4b44ef18 | [
"MIT"
] | 1 | 2020-10-19T10:02:16.000Z | 2020-10-19T10:02:16.000Z | Beginner/03. Python/ZodiacSignTraits.py | DipadityaDas/Hacktoberfest | 0e403775b9f1daaccae5d13084f91e4c4b44ef18 | [
"MIT"
] | null | null | null | Beginner/03. Python/ZodiacSignTraits.py | DipadityaDas/Hacktoberfest | 0e403775b9f1daaccae5d13084f91e4c4b44ef18 | [
"MIT"
] | null | null | null | def showAries():
print("ARIES (March 21 – April 19)")
print("")
print("Aries is independent in nature, fun loving, impulsive and a tough cookie. You desire big goals and possess the determination to accomplish them without getting weakened by any hurdle. " +
"The courage and ambition you hold are worthy enough to drive you ahead. Sometimes your bossy behavior tends to be hurtful.")
print("")
print("Try to strengthen your ego and not suppress others.")
def showTaurus():
print("TAURUS (April 20 – May 20)")
print("")
print("Taurus, the born leaders, are morally and ethically upright and loyal. You possess a firm spirit which keeps you moving on the right path. " +
"Your ethicality knows no limits. You keep your emotions and feelings to yourself. The unmoving emotions and behavior turn you into a reserved and obstinate person. " +
"Your work act as the barricade between your emotions and you as it consumes all your time and restricts you in sharing what you feel.")
print("")
print("You must open and communicate as it would provide you relief rather than hurting you.")
def showGemini():
print("GEMINI (May 21 – June 20)")
print("")
print("The Combination of charismatic persona and open personality. Your versatility, intelligence, and witty behavior draw the attention of people. " +
"Being happy is your life's Mantra, no matter what ups and downs you face. Your excellent communication skills keep your social circle growing. " +
"As you easily get bored, you tend to be insensitive towards others’ feelings.")
print("")
print("Try to have a better understanding of others' perspective and feelings too.")
def showCancer():
print("CANCER (June 21 – July 22)")
print("")
print("Cancer is the most understanding, generous and emotional among all. And, the biggest introvert with the deepest feelings and emotions, on the other side. " +
"Rather than living in a practical world, you live in a world of fantasy with hundreds of voices revolving in your head and guiding your way.")
print("")
print("Try to have more social connections and live in a practical world.")
def showLeo():
print("LEO (July 23 – Aug. 22)")
print("")
print("Leo, the born leaders, are noble, honest and confident. You always remain in the spotlight and holds a persona which attracts others. " +
"But sometimes you are self-centered and highly rely upon others' viewpoints and opinion.")
print("")
print("You must ensure that your decisions are not dependent on others, rather, they must be based on your own views.")
def showVirgo():
print("VIRGO (Aug. 23 – Sept. 22)")
print("")
print("Virgo is practical, meticulous and cool-headed. You are the one with the solution to every problem. In the race of heart and head, you always listen to your head. " +
"Sometimes you set unrealistic goals which are hard to accomplish that also creates a difficult situation for you and others.")
print("")
print("You must go with the flow rather than controlling everything.")
def showLibra():
print("LIBRA (Sept. 23 – Oct. 22)")
print("")
print("Libra is diplomatic but is balanced in nature, possess the carefree attitude and are pleasant in behavior. " +
"You are balanced in making decisions but because of your careless behavior, you face difficult situations. " +
"You try to make others comfortable. You think too much which results in delayed decisions.")
print("")
print("Try to take prompt decisions by believing in yourself.")
def showScorpio():
print("SCORPIO (Oct. 23 – Nov. 21)")
print("")
print("Scorpio is people with a sensual, creative and confident persona. Your sensual nature makes you the best lovers of all the zodiacs. " +
"You hold a magnetic pull which draws the attention of people. You have a sharp memory and a revengeful streak which can be dangerous for your enemies.")
print("")
print("It is good for you to move on and forgive rather than keeping knots of grudges.")
def showSagittarius():
print("SAGITTARIUS (Nov. 22 – Dec. 21)")
print("")
print("Sagittarius holds explosive, fiery and generous personality. You attract people with your funny behavior and generosity. " +
"Many times you also hurt others with your frank and fearless attitude as you unintentionally hurt others with your outspoken attitude.")
print("")
print("Try to listen to other people too and remain emotionally-available.")
def showCapricorn():
print("CAPRICORN (Dec. 22 – Jan. 19)")
print("")
print("Capricorns are hardworking, calm, determined and motivated. You are helpful and practical in nature. " +
"Whatever the challenges are, you always remain ready and keep attaining your desired goals. Your workaholic nature and competitive spirit can sometimes be annoying to others.")
print("")
print("Sometimes slowing down your speed might help you cherish every moment.")
def showAquarius():
print("AQUARIUS (Jan. 20 – Feb. 18)")
print("")
print("Being Aquarius, you are alluring, adventurous and captivating in nature. You possess worldly knowledge because of your passion for travel. " +
"Your personality is full of intelligence and you keep the conversation going for hours owing to your excellent communication skills. " +
"However, your independent and unpredictable nature sometimes makes difficult for people to understand you.")
print("")
print("It’s good to communicate with an open heart, to make your trips more happening.")
def showPisces():
print("PISCES (Feb. 19 – March 20)")
print("")
print("Being a Pisces, you tend to be compassionate, sympathetic and artistic. You are the one with a pure heart and always remain kind to others. " +
"You hold a personality that craves solitude. You live in a world of dreams and imagination and possess the huge interest in music and art.")
print("")
print("Try to live in a practical world rather than losing yourself in thoughts and imaginations.")
def main():
sign = input("Enter your zodiac sign to know your sign's characteristics and personality traits: ")
if sign.lower() == "aries":
showAries()
print("")
print("")
main()
elif sign.lower() == "taurus":
showTaurus()
print("")
print("")
main()
elif sign.lower() == "gemini":
showGemini()
print("")
print("")
main()
elif sign.lower() == "cancer":
showCancer()
print("")
print("")
main()
elif sign.lower() == "leo":
showLeo()
print("")
print("")
main()
elif sign.lower() == "virgo":
showVirgo()
print("")
print("")
main()
elif sign.lower() == "libra":
showLibra()
print("")
print("")
main()
elif sign.lower() == "scorpio":
showScorpio()
print("")
print("")
main()
elif sign.lower() == "sagittarius":
showSagittarius()
print("")
print("")
main()
elif sign.lower() == "capricorn":
showCapricorn()
print("")
print("")
main()
elif sign.lower() == "aquarius":
showAquarius()
print("")
print("")
main()
elif sign.lower() == "pisces":
showPisces()
print("")
print("")
main()
else:
print("Sorry, you have entered an invalid zodiac sign. Please try again.")
print("")
print("")
main()
main()
| 46.345029 | 199 | 0.644164 | 1,036 | 7,925 | 4.939189 | 0.361969 | 0.072308 | 0.035568 | 0.038695 | 0.086574 | 0.064686 | 0 | 0 | 0 | 0 | 0 | 0.008207 | 0.261956 | 7,925 | 170 | 200 | 46.617647 | 0.864592 | 0 | 0 | 0.410256 | 0 | 0.134615 | 0.659273 | 0.002837 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0.012821 | 0 | 0 | 0.083333 | 0.557692 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
8f17a41721088444baaa6e7c92cf04c8870b502e | 25,714 | py | Python | neural-navigation-with-lstm/MARCO/nltk/test/token.py | ronaldahmed/SLAM-for-ugv | 52e3241b8b737a0cfe5682c0aa87ec8c27d6a33d | [
"MIT"
] | 14 | 2016-04-03T19:25:13.000Z | 2022-01-05T07:03:07.000Z | neural-navigation-with-lstm/MARCO/nltk/test/token.py | ronaldahmed/SLAM-for-ugv | 52e3241b8b737a0cfe5682c0aa87ec8c27d6a33d | [
"MIT"
] | null | null | null | neural-navigation-with-lstm/MARCO/nltk/test/token.py | ronaldahmed/SLAM-for-ugv | 52e3241b8b737a0cfe5682c0aa87ec8c27d6a33d | [
"MIT"
] | 5 | 2018-06-21T12:58:58.000Z | 2020-02-15T05:33:39.000Z | # Natural Language Toolkit: Test Code for Tokens and Tokenizers
#
# Copyright (C) 2001 University of Pennsylvania
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
#
# $Id: token.py,v 1.1.1.2 2004/09/29 21:58:12 adastra Exp $
"""
Unit testing for L{nltk.token}.
@group Tokens: test_Token, test_SafeToken, test_SafeToken_checks,
test_TokenReprLocError, test_FrozenToken,
test_ProbabilisticToken
@group Locations: test_Location
@group Demo: test_demo
"""
from nltk.token import *
from nltk.util import mark_stdout_newlines
def test_Token(): """
Unit testing for L{Token}.
>>> Token.USE_SAFE_TOKENS=False
Token Basics
============
A token is a mapping from properties to values. It can be constructed
via keyword arguments, or from an initial dictionary:
>>> tok = Token(A='foo', B=12)
>>> tok
<A='foo', B=12>
>>> tok2 = Token({'A':'foo', 'B':12})
>>> tok2
<A='foo', B=12>
>>> tok3 = Token({'A':'foo'}, B=12)
>>> tok3
<A='foo', B=12>
A token can define zero or more properties:
>>> print Token()
<>
>>> print Token(A='foo')
<A='foo'>
>>> print Token(A='foo', B=12, C='bar', D='baz')
<A='foo', B=12, C='bar', D='baz'>
Properties are accessed via indexing:
>>> tok['A']
'foo'
>>> tok['B']
12
Properties may be added, modified, or deleted:
>>> tok['C'] = 'baz'
>>> tok
<A='foo', B=12, C='baz'>
>>> tok['B'] = 'bar'
>>> tok
<A='foo', B='bar', C='baz'>
>>> del tok['A']
>>> tok
<B='bar', C='baz'>
The list of defined properties can be accessed via L{Token.properties}
and L{Token.has}:
>>> props = tok.properties()
>>> props.sort()
>>> props
['B', 'C']
>>> tok.has('B')
True
>>> tok.has('TEXT')
False
Tokens may contain nested tokens. In particular, a property can
contain a single token:
>>> tok1 = Token(A=12)
>>> tok2 = Token(SUBTOK=tok1, SIZE=144)
>>> print tok2
<SIZE=144, SUBTOK=<A=12>>
Or a list of tokens:
>>> words = [Token(TEXT=word) for word in 'on the table'.split()]
>>> phrase = Token(WORDS=words, TYPE='PP')
>>> print phrase
<TYPE='PP', WORDS=[<on>, <the>, <table>]>
Or a tuple of tokens:
>>> phrase = Token(WORDS=tuple(words), TYPE='PP')
>>> print phrase
<TYPE='PP', WORDS=(<on>, <the>, <table>)>
Or a dictionary containing tokens:
>>> tokdict = {'X': Token(TEXT='X')}
>>> tok = Token(TOKDICT=tokdict)
>>> print tok
<TOKDICT={'X': <X>}>
Tokens can be nested to arbirary depth:
>>> Token(A=Token(B=Token(C=Token(D=Token(E=12)))))
<A=<B=<C=<D=<E=12>>>>>
Tokens can be connected in cycles:
>>> tok1, tok2 = Token(), Token()
>>> tok1['A'] = tok2
>>> tok2['B'] = tok1
>>> print tok1, tok2
<A=<B=<...>>> <B=<A=<...>>>
>>> tok1['A']['B'] is tok1
True
>>> tok1['A']['B']['A']['B']['A']['B']['A']['B'] is tok1
True
Tokens can be compared for equality and ordering. Results should be
identical to comparing the corresponding dictionaries:
>>> tok1 = Token(A=[1], B=12)
>>> tok2 = Token(A=[1], B=12)
>>> tok3 = Token(A=[4], B=12)
>>> print tok1 == tok2, tok2 == tok3, tok3 == tok1
True False False
>>> print tok1 != tok2, tok2 != tok3, tok3 != tok1
False True True
>>> tok1 < tok2 or tok1 > tok2
False
>>> tok2 < tok3 or tok3 > tok2
True
Tokens are a subclass of dict, and support all the standard dict
methods.
>>> tok = Token(TEXT='watch', TAG='NN', SPEAKER='FRAN')
>>> tok
<SPEAKER='FRAN', TAG='NN', TEXT='watch'>
>>> keys = tok.keys(); keys.sort(); print keys
['SPEAKER', 'TAG', 'TEXT']
>>> items = tok.items(); items.sort(); print items
[('SPEAKER', 'FRAN'), ('TAG', 'NN'), ('TEXT', 'watch')]
>>> tok.setdefault('TAG', 'JJ')
'NN'
>>> tok.setdefault('ACCENT', 3)
3
>>> tok.pop('SPEAKER')
'FRAN'
>>> print tok.pop('SPEAKER', None)
None
>>> print tok
<ACCENT=3, TAG='NN', TEXT='watch'>
>>> tok.get('TAG', 'VB')
'NN'
>>> tok.get('TOG', 'VB')
'VB'
>>> tok.update({'TOG': 'XYZ'})
>>> print tok
<ACCENT=3, TAG='NN', TEXT='watch', TOG='XYZ'>
To avoid confusion, tokens raise exceptions when tested for length or
truth value:
>>> len(tok)
Traceback (most recent call last):
[...]
TypeError: len() of unsized Token object
>>> bool(tok)
Traceback (most recent call last):
[...]
TypeError: Token objects cannot be used as booleans
Token Representations
=====================
Special string representations can be registered for specific sets of
properties. By default, special representations are registered for
combinations of the C{TEXT}, C{TAG}, and C{LOC} properties:
>>> Token(TEXT='movie')
<movie>
>>> Token(TEXT='movie', TAG='NN')
<movie/NN>
>>> Token(TEXT='movie', LOC=CharSpanLocation(0,5))
<movie>@[0:5c]
>>> Token(TEXT='movie', TAG='NN', LOC=CharSpanLocation(0,5))
<movie/NN>@[0:5c]
New representations can be registered with L{Token.register_repr},
which takes a tuple of properties and a string or function
representation:
>>> Token.register_repr(('APPLE', 'BAG'), '{{%(APPLE)s::%(BAG)s}}')
>>> Token(APPLE='foo', BAG=12)
{{foo::12}}
>>> def reprfunc(tok):
... props = tok.properties()
... props.sort()
... return '>=<'.join(props)
>>> Token.register_repr(('BAG','CAR'), reprfunc)
>>> Token(BAG='zippy', CAR='dooh dah')
BAG>=<CAR
To deregister a representation, register it as C{None}:
>>> Token.register_repr(('APPLE', 'BAG'), None)
>>> Token.register_repr(('BAG', 'CAR'), None)
>>> Token(APPLE='foo', BAG=12)
<APPLE='foo', BAG=12>
>>> Token(BAG='zippy', CAR='dooh dah')
<BAG='zippy', CAR='dooh dah'>
Copying Tokens
==============
C{Token.copy} creates a new copy of an existing token:
>>> tok1 = Token(TEXT='car', TAG='NN')
>>> tok2 = tok1.copy()
>>> print tok1, tok2
<car/NN> <car/NN>
>>> tok1 is tok2
False
>>> tok1['TEXT'] = 'boat'
>>> print tok1, tok2
<boat/NN> <car/NN>
By default, a deep copy is made:
>>> tok1 = Token(A=[1,2,3], B=Token(C=12))
>>> tok2 = tok1.copy()
>>> tok1 is tok2
False
>>> tok1['A'] is tok2['A']
False
>>> tok1['B'] is tok2['B']
False
To make a shallow copy, use C{deep=False}:
>>> tok1 = Token(A=[1,2,3], B=Token(C=12))
>>> tok2 = tok1.copy(deep=False)
>>> tok1 is tok2
False
>>> tok1['A'] is tok2['A']
True
>>> tok1['B'] is tok2['B']
True
Freezing Tokens
===============
Tokens are not hashable, and so they can not be used as dictionary
keys:
>>> tok = Token(TEXT='cold', TAG='JJ')
>>> {tok:1}
Traceback (most recent call last):
[...]
TypeError: Token objects are unhashable
L{Token.freeze} creates an immutable copy of a token, which I{can} be
as a dictionary key:
>>> frozen_tok = Token(TEXT='cold', TAG='JJ').freeze()
>>> {frozen_tok:1}
{<cold/JJ>: 1}
Frozen tokens are immutable, and can not be modified:
>>> frozen_tok['TAG'] = 'NN'
Traceback (most recent call last):
[...]
TypeError: FrozenToken objects are immutable
Any contained objects are also frozen. Lists are automatically
converted to tuples, and dicts to FrozenDicts.
>>> Token(A=Token(B=12)).freeze()
<A=<B=12>>
>>> Token(A=[1, Token(B=12)]).freeze()
<A=(1, <B=12>)>
>>> Token(A=(1, Token(B=12))).freeze()
<A=(1, <B=12>)>
>>> Token(A={1: Token(B=12)}).freeze()
<A={1: <B=12>}>
>>> type(Token(A={1: Token(B=12)}).freeze()['A'])
<class 'nltk.util.FrozenDict'>
Iterators are automatically converted to tuples:
>>> tokiter = iter([1, Token(B='x', C='y')])
>>> Token(A=tokiter).freeze()
<A=(1, <B='x', C='y'>)>
Cyclic tokens can be frozen:
>>> tok1, tok2 = Token(), Token()
>>> tok1['A'] = tok2
>>> tok2['B'] = tok1
>>> print tok1.freeze(), tok2.freeze()
<A=<B=<...>>> <B=<A=<...>>>
Exclude and Project
===================
L{Token.exclude} and L{Token.project} can be used to create a new
token with a restricted set of properties. C{exclude} creates a new
token that excludes the given list of properties:
>>> tok1 = Token(TEXT='car', TAG='NN', SPEAKER='Joe')
>>> tok2 = tok1.exclude('SPEAKER')
>>> tok3 = tok1.exclude('TEXT', 'TAG')
>>> print tok1, tok2, tok3
<SPEAKER='Joe', TAG='NN', TEXT='car'> <car/NN> <SPEAKER='Joe'>
C{project} creates a new token that I{only} includes the given list of
properties:
>>> tok1 = Token(TEXT='car', TAG='NN', SPEAKER='Joe')
>>> tok2 = tok1.project('SPEAKER')
>>> tok3 = tok1.project('TEXT', 'TAG')
>>> print tok1, tok2, tok3
<SPEAKER='Joe', TAG='NN', TEXT='car'> <SPEAKER='Joe'> <car/NN>
By default, both C{exclude} and C{project} are recursively applied to
any contained subtokens:
>>> tok = Token(A=Token(B='x', C=Token(D='d'), E='e'))
>>> print tok
<A=<B='x', C=<D='d'>, E='e'>>
>>> print tok.exclude('A')
<>
>>> print tok.exclude('B')
<A=<C=<D='d'>, E='e'>>
>>> print tok.exclude('C')
<A=<B='x', E='e'>>
>>> print tok.exclude('D')
<A=<B='x', C=<>, E='e'>>
>>> print tok.exclude('B', 'E')
<A=<C=<D='d'>>>
>>> print tok.project('A')
<A=<>>
>>> print tok.project('A', 'D')
<A=<>>
>>> print tok.project('A', 'C', 'D')
<A=<C=<D='d'>>>
This includes subtokens included in lists, tuples, and dictionaries:
>>> Token(A=[1, Token(B='x', C='y')]).exclude('B')
<A=[1, <C='y'>]>
>>> Token(A=(1, Token(B='x', C='y'))).exclude('B')
<A=(1, <C='y'>)>
>>> Token(A={1: Token(B='x', C='y')}).exclude('B')
<A={1: <C='y'>}>
It also includes iterators:
>>> tokiter = iter([1, Token(B='x', C='y')])
>>> tok = Token(A=tokiter).exclude('B')
>>> for elt in tok['A']:
... print elt
1
<C='y'>
C{exclude} and C{project} both work with cyclic tokens:
>>> tok1, tok2 = Token(), Token()
>>> tok1['A'] = tok2
>>> tok2['B'] = tok1
>>> tok2['C'] = tok1
>>> print tok1
<A=<B=<...>, C=<...>>>
>>> print tok1.exclude('B')
<A=<C=<...>>>
"""
def test_TokenReprLocError(): """
Currently, the generic repr function checks that the C{LOC} property
is actually a location:
>>> Token.USE_SAFE_TOKEN = False
>>> print Token(A='x', LOC='y')
Traceback (most recent call last):
[...]
AssertionError: self['LOC'] is not a location!
"""
def test_FrozenToken(): """
Unit testing for L{FrozenToken}.
Frozen tokens can be contructed via freezing a normal token, or
directly:
>>> tok1 = Token(A='a', B=99).freeze()
>>> tok2 = FrozenToken(A='a', B=99)
Frozen tokens are immutable:
>>> tok1['A'] = 'newval'
Traceback (most recent call last):
[...]
TypeError: FrozenToken objects are immutable
>>> del tok1['A']
Traceback (most recent call last):
[...]
TypeError: FrozenToken objects are immutable
>>> tok1.clear()
Traceback (most recent call last):
[...]
TypeError: FrozenToken objects are immutable
>>> tok1.pop('A')
Traceback (most recent call last):
[...]
TypeError: FrozenToken objects are immutable
>>> tok1.popitem()
Traceback (most recent call last):
[...]
TypeError: FrozenToken objects are immutable
>>> tok1.setdefault('C', 'c')
Traceback (most recent call last):
[...]
TypeError: FrozenToken objects are immutable
>>> tok1.update({'C': 'c'})
Traceback (most recent call last):
[...]
TypeError: FrozenToken objects are immutable
"""
# Copy test_Token, but use SafeTokens instead of Tokens.
def test_SafeToken(): pass
test_SafeToken.__doc__ = test_Token.__doc__.replace(
'Token.USE_SAFE_TOKENS=False',
'Token.USE_SAFE_TOKENS=True').replace(
'Unit testing for L{Token}.',
'Unit testing for L{SafeToken}.').replace(
'TypeError: Token objects are unhashable',
'TypeError: SafeToken objects are unhashable').replace(
'TypeError: Token objects cannot be used as booleans',
'TypeError: SafeToken objects cannot be used as booleans').replace(
'TypeError: len() of unsized Token object',
'TypeError: len() of unsized SafeToken object')
def test_SafeToken_checks(): """
Additional unit tests for C{SafeToken}.
>>> Token.USE_SAFE_TOKENS=True
The C{LOC} property must contain a location:
>>> Token(TEXT='dog', LOC=CharSpanLocation(0,2))
<dog>@[0:2c]
>>> Token(TEXT='dog', LOC=(0,2))
Traceback (most recent call last):
[...]
TypeError: The 'LOC' property must contain a Location
>>> Token(TEXT='dog')['LOC'] = (0,2)
Traceback (most recent call last):
[...]
TypeError: The 'LOC' property must contain a Location
>>> Token(TEXT='dog').setdefault('LOC', (0,2))
Traceback (most recent call last):
[...]
TypeError: The 'LOC' property must contain a Location
>>> Token(TEXT='dog').update({'LOC': (0,2)})
Traceback (most recent call last):
[...]
TypeError: The 'LOC' property must contain a Location
Exclude and project can't be given bad options:
>>> Token().project(x=1)
Traceback (most recent call last):
[...]
ValueError: Bad option 'x'
>>> Token().exclude(x=1)
Traceback (most recent call last):
[...]
ValueError: Bad option 'x'
"""
def test_Location(): """
Unit tests for L{LocationI} and its implementations.
C{LocationI} is an abstract interface for locations. It can't be
instantiated directly:
>>> LocationI()
Traceback (most recent call last):
[...]
AssertionError: Interfaces can't be instantiated
It declares 3 methods, which must be implemented by derived classes:
>>> class BrokenLocation(LocationI):
... pass
>>> BrokenLocation().source()
Traceback (most recent call last):
[...]
AssertionError
>>> cmp(BrokenLocation(), 1)
Traceback (most recent call last):
[...]
AssertionError
>>> hash(BrokenLocation())
Traceback (most recent call last):
[...]
AssertionError
C{SpanLocation} is an abstract base class for locations that are based
on spans. It can't be instantiated directly:
>>> SpanLocation(0,1)
Traceback (most recent call last):
[...]
AssertionError: Abstract classes can't be instantiated
CharSpanLocation
================
C{CharSpanLocation} is a location class derived from C{SpanLocation}.
A C{CharSpanLocation} is constructed from a start, an end, and an
optional source:
>>> loc1 = CharSpanLocation(0, 5, 'foo.txt')
>>> loc2 = CharSpanLocation(0, 5)
>>> loc3 = CharSpanLocation(8, 12)
>>> print loc1, loc2, loc3
[0:5c]@foo.txt [0:5c] [8:12c]
The start, end, and source are accessed via methods:
>>> print loc1.start(), loc1.end(), loc1.source()
0 5 foo.txt
The length is available via a C{length} method, and the C{len}
operator:
>>> print loc1.length(), len(loc1)
5 5
>>> print loc3.length(), len(loc3)
4 4
C{CharSpanLocations} are equal if their start, end, and source are
equal:
>>> loc1 = CharSpanLocation(0,5,'foo.txt')
>>> loc2 = CharSpanLocation(1,5,'foo.txt')
>>> loc3 = CharSpanLocation(0,6,'foo.txt')
>>> loc4 = CharSpanLocation(0,5)
>>> loc5 = CharSpanLocation(0,5,'bar.txt')
>>> print loc1==loc1, loc1==loc2, loc1==loc3, loc1==loc4, loc1==loc5
True False False False False
>>> loc1 == CharSpanLocation(0,5,'foo.txt')
True
C{CharSpanLocations} are hashable, and so they can be used as
dictionary keys:
>>> {loc1: 10}
{[0:5c]: 10}
Ordering
--------
A total ordering on C{CharSpanLocations} is defined by preceeds,
succeeds, and overlaps.
Two locations overlap if one's start falls between the other's start
and end:
>>> loc1 = CharSpanLocation(8,12)
>>> loc2 = CharSpanLocation(10,14)
>>> print loc1.overlaps(loc2), loc2.overlaps(loc1)
True True
Two locations are also considered to overlap if they are both
zero-length locations at the same index:
>>> loc1 = CharSpanLocation(8,8)
>>> loc2 = CharSpanLocation(8,8)
>>> print loc1.overlaps(loc2)
True
Two locations are I{not} considered to overlap if they share a common
boundary:
>>> loc1 = CharSpanLocation(8, 12)
>>> loc2 = CharSpanLocation(12, 14)
>>> print loc1.overlaps(loc2), loc2.overlaps(loc1)
False False
Note that this definition of I{overlaps} is symmetric and reflexive,
but not transitive:
>>> loc1 = CharSpanLocation(8, 12)
>>> loc2 = CharSpanLocation(11, 15)
>>> loc3 = CharSpanLocation(14, 20)
>>> print loc1.overlaps(loc2), loc2.overlaps(loc3), loc1.overlaps(loc3)
True True False
C{precedes} and C{succeeds} test if a location occurs entirely before
or after another location.
>>> loc1 = CharSpanLocation(8,12)
>>> loc2 = CharSpanLocation(14,15)
>>> print loc1.precedes(loc2), loc2.succeeds(loc1)
True True
>>> print loc1.succeeds(loc2), loc2.precedes(loc1)
False False
>>> loc1.succeeds(loc1)
False
loc1 precedes loc2 if they share a common boundary:
>>> loc1 = CharSpanLocation(8, 12)
>>> loc2 = CharSpanLocation(12, 14)
>>> loc1.precedes(loc2)
True
loc1 can preceed loc2 if I{either} is zero-length:
>>> loc1 = CharSpanLocation(12,12)
>>> loc2 = CharSpanLocation(12,14)
>>> loc3 = CharSpanLocation(14,14)
>>> print loc1.precedes(loc2), loc2.precedes(loc3)
True True
But not if both are zero-length:
>>> loc1 = CharSpanLocation(12,12)
>>> print loc1.precedes(loc1)
False
Note that I{precedes} and I{succeds} are anti-symmetric,
anti-reflexive, and transitive.
For any two locations, exactly one of the following will always be
true:
- C{loc1.precedes(loc2)}
- C{loc1.succeeds(loc2)}
- C{loc1.overlaps(loc2)}
To compare locations with precedes, succeeds, and overlaps, they must
have compatible sources and location types:
>>> loc1 = CharSpanLocation(8,12, source='foo.txt')
>>> loc2 = CharSpanLocation(8,12, source='bar.txt')
>>> loc3 = WordIndexLocation(1, source='foo.txt')
>>> loc1.precedes(loc2)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible sources
>>> loc1.precedes(loc3)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible types
>>> loc1.succeeds(loc2)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible sources
>>> loc1.succeeds(loc3)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible types
>>> loc1.overlaps(loc2)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible sources
>>> loc1.overlaps(loc3)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible types
Contiguous Locations & Union
----------------------------
Two locations are contiguous if they share a common boundary:
>>> loc1 = CharSpanLocation(8,12)
>>> loc2 = CharSpanLocation(12,14)
>>> print loc1.contiguous(loc2), loc2.contiguous(loc1)
True True
>>> loc3 = CharSpanLocation(13, 14)
>>> loc1.contiguous(loc3)
False
Either location can be zero-length:
>>> loc1 = CharSpanLocation(8,8)
>>> loc2 = CharSpanLocation(8,12)
>>> loc3 = CharSpanLocation(12,12)
>>> print loc1.contiguous(loc2), loc2.contiguous(loc3)
True True
Or both can be:
>>> loc1 = CharSpanLocation(8,8)
>>> print loc1.contiguous(loc1)
True
If two locations are contiguous, then they can be joined via C{union},
which returns a new location spanning both of them:
>>> loc1 = CharSpanLocation(8,12)
>>> loc2 = CharSpanLocation(12,14)
>>> print loc1.union(loc2)
[8:14c]
>>> print loc2.union(loc1)
[8:14c]
Union can also be written as addition:
>>> print loc1 + loc2
[8:14c]
If the locations are not contiguous, they cannot be joined:
>>> loc1 = CharSpanLocation(8,12)
>>> loc2 = CharSpanLocation(13,14)
>>> loc1.union(loc2)
Traceback (most recent call last):
[...]
ValueError: Locations are not contiguous
To compare locations with contiguous(), or to take their union, they
must have compatible sources and location types:
>>> loc1 = CharSpanLocation(8,12, source='foo.txt')
>>> loc2 = CharSpanLocation(8,12, source='bar.txt')
>>> loc3 = WordIndexLocation(1, source='foo.txt')
>>> loc1.contiguous(loc2)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible sources
>>> loc1.contiguous(loc3)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible types
>>> loc1.union(loc2)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible sources
>>> loc1.union(loc3)
Traceback (most recent call last):
[...]
ValueError: Locations have incompatible types
Infinity
--------
Under special circumstances, it can be useful to use -INF as a
location's start, or +INF as its end. This is done with
C{SpanLocation.MIN} and C{SpanLocation.MAX}:
>>> loc1 = CharSpanLocation(SpanLocation.MIN, 8)
>>> loc2 = CharSpanLocation(8, 12)
>>> loc3 = CharSpanLocation(12, SpanLocation.MAX)
>>> print loc1, loc2, loc3
[-INF:8c] [8:12c] [12:+INFc]
>>> print loc1.precedes(loc2), loc2.precedes(loc3)
True True
>>> print loc1+loc2+loc3
[-INF:+INFc]
Select
------
L{SpanLocation.select} can be used to select the text specified by a
location:
>>> text = 'a small frog is sleeping'
>>> loc = CharSpanLocation(8, 12)
>>> loc.select(text)
'frog'
IndexLocations
==============
C{IndexLocation} is an abstract base class for locations that are based
on indexes. It can't be instantiated directly:
>>> IndexLocation(0)
Traceback (most recent call last):
[...]
AssertionError: Abstract classes can't be instantiated
It is implemented by C{WordIndexLocation}, C{SentIndexLocation}, and
C{ParaIndexLocation}:
>>> loc1 = WordIndexLocation(1)
>>> loc2 = SentIndexLocation(2)
>>> loc3 = ParaIndexLocation(3)
>>> print loc1, loc2, loc3
[1w] [2s] [3p]
>>> print loc1.index(), loc2.index(), loc3.index()
1 2 3
Index locations can have sources:
>>> loc1 = WordIndexLocation(1, 'foo.txt')
>>> loc2 = SentIndexLocation(2, 'bar.txt')
>>> loc3 = ParaIndexLocation(3, 'baz.txt')
>>> print loc1, loc2, loc3
[1w]@foo.txt [2s]@bar.txt [3p]@baz.txt
>>> loc1.source()
'foo.txt'
Sometimes it can be useful to use index locations as sources for other
index locations, to provide hierarchical location specifications:
>>> loc1 = ParaIndexLocation(8, 'foo.txt')
>>> loc2 = SentIndexLocation(3, loc1)
>>> loc3 = WordIndexLocation(5, loc2)
>>> print loc3
[5w]@[3s]@[8p]@foo.txt
Index locations are hashable, and so can be used as dictionary keys:
>>> {WordIndexLocation(3): 1}
{[3w]: 1}
Index locations are ordered, and can be compared with cmp:
>>> loc1 = WordIndexLocation(3)
>>> loc2 = WordIndexLocation(5)
>>> loc1 < loc2
True
"""
# def test_ProbabilisticToken(): """
# Probablistic tokens will probably be deprecated. But in the mean
# time, this covers the code in ProbabilisticToken:
# >>> ProbabilisticToken(0.25, TEXT='dog')
# <dog> (p=0.25)
# """
def test_demo(): r"""
Unit tests for L{nltk.token.demo}.
>>> mark_stdout_newlines(demo)
______________________________________________________________________
loc = CharSpanLocation(3, 13, source='corpus.txt')
loc2 = CharSpanLocation(20, 25, source='corpus.txt')
<--BLANKLINE-->
print loc => [3:13c]@corpus.txt
print loc.start => 3
print loc.end => 13
print loc.length() => 10
print loc.source => corpus.txt
print loc2 => [20:25c]@corpus.txt
print loc.precedes(loc2) => True
print loc.succeeds(loc2) => False
print loc.overlaps(loc2) => False
______________________________________________________________________
tok = Token(TEXT='flattening', TAG='VBG', LOC=loc)
tok2 = Token(SIZE=12, WEIGHT=83, LOC=loc2)
<--BLANKLINE-->
print tok => <flattening/VBG>@[3:13c]
print tok['LOC'] => [3:13c]@corpus.txt
print tok.exclude('LOC') => <flattening/VBG>
print tok.exclude('TEXT') => <TAG='VBG'>@[3:13c]
print tok.project('TEXT') => <flattening>
print tok2 => <SIZE=12, WEIGHT=83>@[20:25c]
print tok2['LOC'] => [20:25c]@corpus.txt
print tok == tok2 => False
print tok == tok.copy() => True
"""
#######################################################################
# Test Runner
#######################################################################
import sys, os, os.path
if __name__ == '__main__': sys.path[0] = None
import unittest, doctest, trace
def testsuite(reload_module=False):
import doctest, nltk.test.token
if reload_module: reload(nltk.test.token)
return doctest.DocTestSuite(nltk.test.token)
def test(verbosity=2, reload_module=False):
runner = unittest.TextTestRunner(verbosity=verbosity)
runner.run(testsuite(reload_module))
if __name__ == '__main__':
test(reload_module=True)
| 27.980413 | 75 | 0.60224 | 3,352 | 25,714 | 4.556683 | 0.1429 | 0.029789 | 0.043538 | 0.052704 | 0.436101 | 0.353477 | 0.286958 | 0.270001 | 0.230195 | 0.220767 | 0 | 0.03578 | 0.220697 | 25,714 | 918 | 76 | 28.010893 | 0.726433 | 0.941355 | 0 | 0.349432 | 0 | 0.015625 | 0.962064 | 0.098803 | 0 | 0 | 0 | 0 | 0.009943 | 1 | 0.012784 | false | 0.002841 | 0.007102 | 0 | 0.021307 | 0.109375 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f2646ce36d6581ebe702f50f5d9fc46cbfd6387 | 3,332 | py | Python | pythonProjects/guess_number/guess_number.py | chisma/pythonProjects | f22f4e8dae33827696ed6ad3168d1cc83f974150 | [
"MIT"
] | null | null | null | pythonProjects/guess_number/guess_number.py | chisma/pythonProjects | f22f4e8dae33827696ed6ad3168d1cc83f974150 | [
"MIT"
] | null | null | null | pythonProjects/guess_number/guess_number.py | chisma/pythonProjects | f22f4e8dae33827696ed6ad3168d1cc83f974150 | [
"MIT"
] | null | null | null | import random
def guess(x):
random_number = random.randint(1, x)
guess = 0
while guess != random_number:
guess = int(input(
f"Python generated a lucky number for you. It's your turn to GUESS the number now! Hint: Number is between 1 and {x}: "))
if guess < random_number:
print(
"Sorry! Try again with a higher number!")
elif guess > random_number:
print(
"Sorry! Try again with a smaller number!")
print(
f"Your guess is correct!The random number generated by Python was {random_number}")
def computer_guess():
computer_guess = 0
my_secret_number = 1
print("Think of a number between 1 and 10")
input("Hit enter when you are ready!")
computer_guess = random.randint(1, 10)
while(computer_guess != my_secret_number):
user_response = input(
f"Is your secret number = {computer_guess}?(Y/N)")
if user_response == "N" or user_response == "n":
if(computer_guess < 10 and computer_guess > 0):
user_response1 = input(
f"Is your secret number > {computer_guess}?(Y/N)")
if user_response1 == "Y" or user_response1 == "y":
computer_guess += 1
elif user_response1 == "N" or user_response1 == "n":
computer_guess -= 1
elif computer_guess < 10 or computer_guess > 0:
print("I have exceeded maximum tries, I give up!")
break
elif user_response == "Y" or user_response == "y":
my_secret_number = computer_guess
print(
f"I am a genius computer!Your secret number was {computer_guess}!!!")
break
def smarter_computer_guess():
print("*******Guessing Game*********")
print("Let the computer guess the secret number in your mind!")
print("Think of a number between 1 and 100")
input("Press ENTER when you are ready!!!")
secret_number = -1
computer_guess = 0
starting_number_in_range = 1
ending_number_in_range = 100
while(computer_guess != secret_number):
computer_guess = random.randint(
starting_number_in_range, ending_number_in_range)
guessed_the_number = input(
f"Is your secret number = {computer_guess}?(Y/N): ").lower()
if guessed_the_number == "N" or guessed_the_number == "n":
user_response = input(
f"Is your secret number > {computer_guess}?(Y/N): ").lower()
if user_response == "y":
starting_number_in_range = computer_guess+1
computer_guess = random.randint(
starting_number_in_range, ending_number_in_range)
elif user_response == "n":
ending_number_in_range = computer_guess
computer_guess = random.randint(
starting_number_in_range, ending_number_in_range)
elif guessed_the_number == "y":
secret_number = computer_guess
print(
f"I am a genius computer!!!Your secret number was {computer_guess}")
else:
print("That's an invalid response, PRESS Y for Yes OR N for No :)")
# guess(88)
# computer_guess()
smarter_computer_guess()
| 39.666667 | 133 | 0.592737 | 421 | 3,332 | 4.465558 | 0.211401 | 0.200532 | 0.069149 | 0.093085 | 0.428723 | 0.379787 | 0.379787 | 0.379787 | 0.347872 | 0.305319 | 0 | 0.016178 | 0.313625 | 3,332 | 83 | 134 | 40.144578 | 0.805859 | 0.007803 | 0 | 0.236111 | 1 | 0.013889 | 0.277021 | 0.027248 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041667 | false | 0 | 0.013889 | 0 | 0.055556 | 0.152778 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f266aed6cb59b4094ca5da5b7023d58572f52b6 | 9,705 | py | Python | catalog/harvest/initial_harverster_s2.py | eoss-cloud/madxxx_catalog_api | ef37374a36129de4f0a6fe5dd46b5bc2e2f01d1d | [
"MIT"
] | null | null | null | catalog/harvest/initial_harverster_s2.py | eoss-cloud/madxxx_catalog_api | ef37374a36129de4f0a6fe5dd46b5bc2e2f01d1d | [
"MIT"
] | null | null | null | catalog/harvest/initial_harverster_s2.py | eoss-cloud/madxxx_catalog_api | ef37374a36129de4f0a6fe5dd46b5bc2e2f01d1d | [
"MIT"
] | null | null | null | #-*- coding: utf-8 -*-
""" EOSS catalog system
Reads sentinel2 data which is stored in AWS buckets extracted with 'aws s3 ls sentinel-s2-l1c/products/ --recursive --region=eu-central-1 | grep productInfo.json'
[
"""
__author__ = "Thilo Wehrmann, Steffen Gebhardt"
__copyright__ = "Copyright 2016, EOSS GmbH"
__credits__ = ["Thilo Wehrmann", "Steffen Gebhardt"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Thilo Wehrmann"
__email__ = "twehrmann@eoss.cloud"
__status__ = "Production"
import ujson
import dateutil.parser
import xmltodict
import datetime
from api.eoss_api import Api
from harvest import count_lines
from manage.sentinelcatalog import SENTINEL_S3_BUCKET, SENTINEL_S3_HTTP_ZIP_BASEURL, \
SENTINEL_S3_HTTP_BASEURL, SentinelCatalog
from model.plain_models import SentinelS3Container, Catalog_Dataset
from utilities.web_utils import remote_file_exists, public_key_exists, public_get_filestream
def sentinel_harvester(in_csv, N, M=1000):
datasets = []
with open(in_csv, 'r') as f:
for counter, line in enumerate(f):
content_list = line.split(' ')
tileinfokey = content_list[-1]
tileinfokey = tileinfokey.rstrip("\n")
quicklookkey = tileinfokey.replace('tileInfo.json', 'preview.jpg')
if counter < N + M and counter >= N:
if public_key_exists(SENTINEL_S3_BUCKET, tileinfokey) and public_key_exists(SENTINEL_S3_BUCKET,
quicklookkey):
tilenfodict = ujson.loads(public_get_filestream(SENTINEL_S3_BUCKET, tileinfokey))
productkey = tilenfodict['productPath']
s3 = SentinelS3Container()
s3.bucket = SENTINEL_S3_BUCKET
s3.tile = tilenfodict['path'] + '/'
s3.quicklook = quicklookkey
dataset = Catalog_Dataset()
dataset.entity_id = tilenfodict['productName']
dataset.tile_identifier = '%02d%s%s' % (
tilenfodict['utmZone'], tilenfodict['latitudeBand'], tilenfodict['gridSquare'])
dataset.clouds = tilenfodict['cloudyPixelPercentage']
dataset.acq_time = dateutil.parser.parse(tilenfodict['timestamp'])
if public_key_exists(SENTINEL_S3_BUCKET, productkey + '/metadata.xml'):
s3.product = productkey + '/'
metadatakey = productkey + '/metadata.xml'
metadatadict = xmltodict.parse(
public_get_filestream(SENTINEL_S3_BUCKET, metadatakey))
dataset.sensor = \
metadatadict['n1:Level-1C_User_Product']['n1:General_Info']['Product_Info']['Datatake'][
'SPACECRAFT_NAME']
dataset.level = metadatadict['n1:Level-1C_User_Product']['n1:General_Info']['Product_Info'][
'PROCESSING_LEVEL']
daynight = 'day'
if metadatadict['n1:Level-1C_User_Product']['n1:General_Info']['Product_Info']['Datatake'][
'SENSING_ORBIT_DIRECTION'] != 'DESCENDING':
daynight = 'night'
dataset.daynight = daynight
quicklookurl = SENTINEL_S3_HTTP_BASEURL + tilenfodict['path'] + '/preview.jpg'
metadataurl = SENTINEL_S3_HTTP_BASEURL + productkey + '/metadata.xml'
container = dict()
if remote_file_exists(quicklookurl):
container['quicklook'] = quicklookurl
if remote_file_exists(metadataurl):
container['metadata'] = metadataurl
if remote_file_exists(SENTINEL_S3_HTTP_ZIP_BASEURL + dataset.entity_id + '.zip'):
s3.zip = SENTINEL_S3_HTTP_ZIP_BASEURL + dataset.entity_id + '.zip'
if s3.zip != None or s3.bucket != None:
container.update(s3.to_dict())
dataset.resources = container
datasets.append(dataset)
print counter, 'processed...', N
return datasets
def sentinel_harvester_line(lines):
datasets = []
for line in lines:
content_list = line.split(' ')
tileinfokey = content_list[-1]
tileinfokey = tileinfokey.rstrip("\n")
quicklookkey = tileinfokey.replace('tileInfo.json', 'preview.jpg')
if public_key_exists(SENTINEL_S3_BUCKET, tileinfokey) and public_key_exists(SENTINEL_S3_BUCKET, quicklookkey):
tilenfodict = ujson.loads(public_get_filestream(SENTINEL_S3_BUCKET, tileinfokey))
productkey = tilenfodict['productPath']
s3 = SentinelS3Container()
s3.bucket = SENTINEL_S3_BUCKET
s3.tile = tilenfodict['path'] + '/'
s3.quicklook = quicklookkey
dataset = Catalog_Dataset()
dataset.entity_id = tilenfodict['productName']
dataset.tile_identifier = '%02d%s%s' % (
tilenfodict['utmZone'], tilenfodict['latitudeBand'], tilenfodict['gridSquare'])
dataset.clouds = tilenfodict['cloudyPixelPercentage']
dataset.acq_time = dateutil.parser.parse(tilenfodict['timestamp'])
if public_key_exists(SENTINEL_S3_BUCKET, productkey + '/metadata.xml'):
s3.product = productkey + '/'
metadatakey = productkey + '/metadata.xml'
metadatadict = xmltodict.parse(
public_get_filestream(SENTINEL_S3_BUCKET, metadatakey))
dataset.sensor = \
metadatadict['n1:Level-1C_User_Product']['n1:General_Info']['Product_Info']['Datatake'][
'SPACECRAFT_NAME']
dataset.level = metadatadict['n1:Level-1C_User_Product']['n1:General_Info']['Product_Info'][
'PROCESSING_LEVEL']
daynight = 'day'
if metadatadict['n1:Level-1C_User_Product']['n1:General_Info']['Product_Info']['Datatake'][
'SENSING_ORBIT_DIRECTION'] != 'DESCENDING':
daynight = 'night'
dataset.daynight = daynight
quicklookurl = SENTINEL_S3_HTTP_BASEURL + tilenfodict['path'] + '/preview.jpg'
metadataurl = SENTINEL_S3_HTTP_BASEURL + productkey + '/metadata.xml'
container = dict()
if remote_file_exists(quicklookurl):
container['quicklook'] = quicklookurl
if remote_file_exists(metadataurl):
container['metadata'] = metadataurl
if remote_file_exists(SENTINEL_S3_HTTP_ZIP_BASEURL + dataset.entity_id + '.zip'):
s3.zip = SENTINEL_S3_HTTP_ZIP_BASEURL + dataset.entity_id + '.zip'
if s3.zip != None or s3.bucket != None:
container.update(s3.to_dict())
dataset.resources = container
datasets.append(dataset)
return datasets
def import_from_file_s2(in_csv, block_size):
import pprint
n, m = (0, block_size)
api = Api()
for n in range(0, count_lines(in_csv), m):
print 'Range: <%d:%d>' % (n, n + m)
datasets = sentinel_harvester(in_csv, n, m)
out = api.create_dataset(datasets)
pprint.pprint(out)
def import_from_pipe_s2(lines):
import pprint
api = Api()
datasets = sentinel_harvester_line(lines)
out = api.create_dataset(datasets)
pprint.pprint(out)
def import_from_sentinel_catalog(sensor,start_date, api_url):
import numpy
api = Api(api_url)
max_cloud_ratio = 1.0
ag_season_start = dateutil.parser.parse(start_date)
ag_season_end = ag_season_start + datetime.timedelta(days=1)
for lon in numpy.arange(-180,180,9):
for lat in numpy.arange(-90,90,9):
lon_end = lon + 9
lat_end = lat + 9
aoi_se = (lon_end, lat)
aoi_nw = (lon, lat_end)
aoi_ne = (aoi_se[0], aoi_nw[1])
aoi_sw = (aoi_nw[0], aoi_se[1])
aoi = [aoi_nw, aoi_ne, aoi_se, aoi_sw, aoi_nw]
cat = SentinelCatalog()
datasets = cat.find(sensor, aoi, ag_season_start, ag_season_end, max_cloud_ratio)
if datasets != None:
ds_found = list()
ds_missing = list()
for counter, ds in enumerate(datasets):
catalog_ds = api.get_dataset(ds.entity_id)
if catalog_ds is None or len(catalog_ds) == 0:
ds_missing.append(ds)
elif len(catalog_ds) == 1:
ds_found.append(catalog_ds)
else:
print 'More in catalog found: %s (%d)' % (ds.entity_id, len(catalog_ds))
if (counter % 25) == 0:
print counter, len(datasets)
print 'already registered: ', len(ds_found), len(datasets)
print 'missing: ', len(ds_missing), len(datasets)
for counter, ds_obj in enumerate(ds_missing):
new_ds = api.create_dataset(ds_obj)
if not new_ds is None:
print new_ds
if (counter % 25) == 0:
print counter, len(ds_missing)
else:
print 'No data found in catalog for %s from %s to %s' % (
sensor, ag_season_start.strftime("%Y-%m-%d"), ag_season_end.strftime("%Y-%m-%d"))
| 43.716216 | 162 | 0.578362 | 1,010 | 9,705 | 5.287129 | 0.212871 | 0.043071 | 0.038951 | 0.025843 | 0.638202 | 0.633708 | 0.624719 | 0.624719 | 0.613858 | 0.613858 | 0 | 0.017914 | 0.321278 | 9,705 | 221 | 163 | 43.914027 | 0.792774 | 0.002164 | 0 | 0.590909 | 0 | 0 | 0.123485 | 0.024444 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.085227 | null | null | 0.073864 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f32b28ef06f4c9691ebde7eacf5ccd2c60aace0 | 163 | py | Python | Ex 06.py | brunobendel/Exercicios-python-Pycharm | 145ded6cb5533aeef1b89f0bce20f0a90e37216c | [
"MIT"
] | null | null | null | Ex 06.py | brunobendel/Exercicios-python-Pycharm | 145ded6cb5533aeef1b89f0bce20f0a90e37216c | [
"MIT"
] | null | null | null | Ex 06.py | brunobendel/Exercicios-python-Pycharm | 145ded6cb5533aeef1b89f0bce20f0a90e37216c | [
"MIT"
] | null | null | null | import math
a = int(input('digite um numero:'))
print('O dobro do valor digitado é: {}\nO triplo é: {}\nA Raiz quadrada é: {}'.format((a*2),(a*3),(math.sqrt(a))))
| 40.75 | 114 | 0.631902 | 30 | 163 | 3.433333 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014085 | 0.128834 | 163 | 3 | 115 | 54.333333 | 0.711268 | 0 | 0 | 0 | 0 | 0 | 0.533742 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
8f38228c5f10a4b1bfb3ff3d3be495b938c13abf | 1,063 | py | Python | tests/unit/rules/contexts/count.py | translationexchange/tml-python | d09e36f0319f5d3ac0b83ee84b8848d2b2e8e481 | [
"MIT"
] | 1 | 2017-05-16T18:54:18.000Z | 2017-05-16T18:54:18.000Z | tests/unit/rules/contexts/count.py | translationexchange/tml-python | d09e36f0319f5d3ac0b83ee84b8848d2b2e8e481 | [
"MIT"
] | 2 | 2021-03-22T16:55:59.000Z | 2021-12-13T19:35:12.000Z | tests/unit/rules/contexts/count.py | translationexchange/tml-python | d09e36f0319f5d3ac0b83ee84b8848d2b2e8e481 | [
"MIT"
] | 1 | 2016-08-15T14:41:18.000Z | 2016-08-15T14:41:18.000Z | # encoding: UTF-8
""" Test rules built-in functions """
from __future__ import absolute_import
import unittest
from tml.rules.contexts.count import *
import six
class WithLength(object):
def __len__(self):
return 10
class WithoutLength(object):
pass
class CountTest(unittest.TestCase):
""" Test count """
def test_list(self):
self.assertEquals(3, Count.match([1,2,3]), 'List')
def test_tuple(self):
self.assertEquals(2, Count.match((100, 500)), 'Tuple')
def test_object(self):
self.assertEquals(10, Count.match(WithLength()), 'Object with length getter')
with self.assertRaises(ArgumentError):
Count.match(WithoutLength())
def test_string(self):
with self.assertRaises(ArgumentError):
Count.match('Hello world')
with self.assertRaises(ArgumentError):
Count.match(six.u('Hello world'))
def test_dict(self):
self.assertEquals(2, Count.match({'a':'A', 'b':'B'}), 'Count dict')
if __name__ == '__main__':
unittest.main()
| 25.926829 | 85 | 0.648166 | 129 | 1,063 | 5.170543 | 0.410853 | 0.104948 | 0.11994 | 0.148426 | 0.286357 | 0.286357 | 0 | 0 | 0 | 0 | 0 | 0.020311 | 0.212606 | 1,063 | 40 | 86 | 26.575 | 0.776583 | 0.055503 | 0 | 0.111111 | 0 | 0 | 0.078788 | 0 | 0 | 0 | 0 | 0 | 0.259259 | 1 | 0.222222 | false | 0.037037 | 0.148148 | 0.037037 | 0.518519 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
8f3c5e333599fdc4cb51a82f876ae8020f57a096 | 167 | py | Python | critters/fac-rec.py | vagoff/shootout | e2f085436cdfb6945db55dc92a9de46e925b8f39 | [
"MIT"
] | null | null | null | critters/fac-rec.py | vagoff/shootout | e2f085436cdfb6945db55dc92a9de46e925b8f39 | [
"MIT"
] | null | null | null | critters/fac-rec.py | vagoff/shootout | e2f085436cdfb6945db55dc92a9de46e925b8f39 | [
"MIT"
] | null | null | null | import sys
import math
sys.setrecursionlimit(999000)
def fac(x):
if x < 2:
return 1
return x * fac(x - 1)
print(math.log10( fac(int(sys.argv[1])) ))
| 15.181818 | 42 | 0.616766 | 28 | 167 | 3.678571 | 0.571429 | 0.07767 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094488 | 0.239521 | 167 | 10 | 43 | 16.7 | 0.716535 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.25 | 0 | 0.625 | 0.125 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
8f3d42e4319e4d469d3130e2ebbeefda3a9be8b3 | 553 | py | Python | whyis/autonomic/global_change_service.py | tolulomo/whyis | eb50ab3301eb7efd27a1a3f6fb2305dedd910397 | [
"Apache-2.0"
] | 31 | 2018-05-30T02:41:23.000Z | 2021-10-17T01:25:20.000Z | whyis/autonomic/global_change_service.py | tolulomo/whyis | eb50ab3301eb7efd27a1a3f6fb2305dedd910397 | [
"Apache-2.0"
] | 115 | 2018-04-07T00:59:11.000Z | 2022-03-02T03:06:45.000Z | whyis/autonomic/global_change_service.py | tolulomo/whyis | eb50ab3301eb7efd27a1a3f6fb2305dedd910397 | [
"Apache-2.0"
] | 25 | 2018-04-07T00:49:55.000Z | 2021-09-28T14:29:18.000Z | from builtins import str
import sadi
import rdflib
import setlr
from datetime import datetime
from .service import Service
from nanopub import Nanopublication
from datastore import create_id
import flask
from flask import render_template
from flask import render_template_string
import logging
import sys, traceback
import database
import tempfile
from depot.io.interfaces import StoredFile
from whyis.namespace import whyis
class GlobalChangeService(Service):
@property
def query_predicate(self):
return whyis.globalChangeQuery
| 18.433333 | 42 | 0.824593 | 71 | 553 | 6.352113 | 0.535211 | 0.039911 | 0.066519 | 0.093126 | 0.128603 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151899 | 553 | 29 | 43 | 19.068966 | 0.96162 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0 | 0.809524 | 0.047619 | 0.952381 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
8f4ba8756914cc8a83e68158bde5ac2c5e1c8691 | 162 | py | Python | alloy_related/alloyToRailML/parserAlloy/level.py | pedrordgs/RailML-Utilities | f8842860acc263ab30da185ff2ca84c3ba7335f6 | [
"MIT"
] | 21 | 2021-06-23T03:09:42.000Z | 2021-12-22T16:02:20.000Z | alloy_related/alloyToRailML/parserAlloy/level.py | pedrordgs/RailML-Utilities | f8842860acc263ab30da185ff2ca84c3ba7335f6 | [
"MIT"
] | null | null | null | alloy_related/alloyToRailML/parserAlloy/level.py | pedrordgs/RailML-Utilities | f8842860acc263ab30da185ff2ca84c3ba7335f6 | [
"MIT"
] | null | null | null | class Level:
def __init__(self, ident, desc, nresources):
self.id = ident
self.description = desc
self.networkResources = nresources
| 23.142857 | 48 | 0.641975 | 17 | 162 | 5.882353 | 0.647059 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.277778 | 162 | 6 | 49 | 27 | 0.854701 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0 | 0.4 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f51b1f418a90f98dcafef44fa8b0b18f313cb00 | 64,106 | py | Python | pytorch2caffe/caffe_pb2.py | wgggSg/pytorch2caffe | b22ce80339dd5f5fcec5a53f65fbf3b1c1d938ea | [
"MIT"
] | null | null | null | pytorch2caffe/caffe_pb2.py | wgggSg/pytorch2caffe | b22ce80339dd5f5fcec5a53f65fbf3b1c1d938ea | [
"MIT"
] | null | null | null | pytorch2caffe/caffe_pb2.py | wgggSg/pytorch2caffe | b22ce80339dd5f5fcec5a53f65fbf3b1c1d938ea | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: caffe.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x63\x61\x66\x66\x65.proto\x12\x05\x63\x61\x66\x66\x65\"\x1c\n\tBlobShape\x12\x0f\n\x03\x64im\x18\x01 \x03(\x03\x42\x02\x10\x01\"\xcc\x01\n\tBlobProto\x12\x1f\n\x05shape\x18\x07 \x01(\x0b\x32\x10.caffe.BlobShape\x12\x10\n\x04\x64\x61ta\x18\x05 \x03(\x02\x42\x02\x10\x01\x12\x10\n\x04\x64iff\x18\x06 \x03(\x02\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_data\x18\x08 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_diff\x18\t \x03(\x01\x42\x02\x10\x01\x12\x0e\n\x03num\x18\x01 \x01(\x05:\x01\x30\x12\x13\n\x08\x63hannels\x18\x02 \x01(\x05:\x01\x30\x12\x11\n\x06height\x18\x03 \x01(\x05:\x01\x30\x12\x10\n\x05width\x18\x04 \x01(\x05:\x01\x30\"2\n\x0f\x42lobProtoVector\x12\x1f\n\x05\x62lobs\x18\x01 \x03(\x0b\x32\x10.caffe.BlobProto\"\x81\x01\n\x05\x44\x61tum\x12\x10\n\x08\x63hannels\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\r\n\x05width\x18\x03 \x01(\x05\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12\r\n\x05label\x18\x05 \x01(\x05\x12\x12\n\nfloat_data\x18\x06 \x03(\x02\x12\x16\n\x07\x65ncoded\x18\x07 \x01(\x08:\x05\x66\x61lse\"\x8a\x02\n\x0f\x46illerParameter\x12\x16\n\x04type\x18\x01 \x01(\t:\x08\x63onstant\x12\x10\n\x05value\x18\x02 \x01(\x02:\x01\x30\x12\x0e\n\x03min\x18\x03 \x01(\x02:\x01\x30\x12\x0e\n\x03max\x18\x04 \x01(\x02:\x01\x31\x12\x0f\n\x04mean\x18\x05 \x01(\x02:\x01\x30\x12\x0e\n\x03std\x18\x06 \x01(\x02:\x01\x31\x12\x12\n\x06sparse\x18\x07 \x01(\x05:\x02-1\x12\x42\n\rvariance_norm\x18\x08 \x01(\x0e\x32#.caffe.FillerParameter.VarianceNorm:\x06\x46\x41N_IN\"4\n\x0cVarianceNorm\x12\n\n\x06\x46\x41N_IN\x10\x00\x12\x0b\n\x07\x46\x41N_OUT\x10\x01\x12\x0b\n\x07\x41VERAGE\x10\x02\"\x8e\x02\n\x0cNetParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x03 \x03(\t\x12%\n\x0binput_shape\x18\x08 \x03(\x0b\x32\x10.caffe.BlobShape\x12\x11\n\tinput_dim\x18\x04 \x03(\x05\x12\x1d\n\x0e\x66orce_backward\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x05state\x18\x06 
\x01(\x0b\x32\x0f.caffe.NetState\x12\x19\n\ndebug_info\x18\x07 \x01(\x08:\x05\x66\x61lse\x12$\n\x05layer\x18\x64 \x03(\x0b\x32\x15.caffe.LayerParameter\x12\'\n\x06layers\x18\x02 \x03(\x0b\x32\x17.caffe.V1LayerParameter\"\xd4\n\n\x0fSolverParameter\x12\x0b\n\x03net\x18\x18 \x01(\t\x12&\n\tnet_param\x18\x19 \x01(\x0b\x32\x13.caffe.NetParameter\x12\x11\n\ttrain_net\x18\x01 \x01(\t\x12\x10\n\x08test_net\x18\x02 \x03(\t\x12,\n\x0ftrain_net_param\x18\x15 \x01(\x0b\x32\x13.caffe.NetParameter\x12+\n\x0etest_net_param\x18\x16 \x03(\x0b\x32\x13.caffe.NetParameter\x12$\n\x0btrain_state\x18\x1a \x01(\x0b\x32\x0f.caffe.NetState\x12#\n\ntest_state\x18\x1b \x03(\x0b\x32\x0f.caffe.NetState\x12\x11\n\ttest_iter\x18\x03 \x03(\x05\x12\x18\n\rtest_interval\x18\x04 \x01(\x05:\x01\x30\x12 \n\x11test_compute_loss\x18\x13 \x01(\x08:\x05\x66\x61lse\x12!\n\x13test_initialization\x18 \x01(\x08:\x04true\x12\x0f\n\x07\x62\x61se_lr\x18\x05 \x01(\x02\x12\x0f\n\x07\x64isplay\x18\x06 \x01(\x05\x12\x17\n\x0c\x61verage_loss\x18! 
\x01(\x05:\x01\x31\x12\x10\n\x08max_iter\x18\x07 \x01(\x05\x12\x14\n\titer_size\x18$ \x01(\x05:\x01\x31\x12\x11\n\tlr_policy\x18\x08 \x01(\t\x12\r\n\x05gamma\x18\t \x01(\x02\x12\r\n\x05power\x18\n \x01(\x02\x12\x10\n\x08momentum\x18\x0b \x01(\x02\x12\x14\n\x0cweight_decay\x18\x0c \x01(\x02\x12\x1f\n\x13regularization_type\x18\x1d \x01(\t:\x02L2\x12\x10\n\x08stepsize\x18\r \x01(\x05\x12\x11\n\tstepvalue\x18\" \x03(\x05\x12\x1a\n\x0e\x63lip_gradients\x18# \x01(\x02:\x02-1\x12\x13\n\x08snapshot\x18\x0e \x01(\x05:\x01\x30\x12\x17\n\x0fsnapshot_prefix\x18\x0f \x01(\t\x12\x1c\n\rsnapshot_diff\x18\x10 \x01(\x08:\x05\x66\x61lse\x12K\n\x0fsnapshot_format\x18% \x01(\x0e\x32%.caffe.SolverParameter.SnapshotFormat:\x0b\x42INARYPROTO\x12;\n\x0bsolver_mode\x18\x11 \x01(\x0e\x32!.caffe.SolverParameter.SolverMode:\x03GPU\x12\x14\n\tdevice_id\x18\x12 \x01(\x05:\x01\x30\x12\x17\n\x0brandom_seed\x18\x14 \x01(\x03:\x02-1\x12\x11\n\x04type\x18( \x01(\t:\x03SGD\x12\x14\n\x05\x64\x65lta\x18\x1f \x01(\x02:\x05\x31\x65-08\x12\x18\n\tmomentum2\x18\' \x01(\x02:\x05\x30.999\x12\x17\n\trms_decay\x18& \x01(\x02:\x04\x30.99\x12\x19\n\ndebug_info\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\"\n\x14snapshot_after_train\x18\x1c \x01(\x08:\x04true\x12;\n\x0bsolver_type\x18\x1e \x01(\x0e\x32!.caffe.SolverParameter.SolverType:\x03SGD\x12\x1f\n\x11layer_wise_reduce\x18) \x01(\x08:\x04true\x12\x0f\n\x07weights\x18* \x03(\t\"+\n\x0eSnapshotFormat\x12\x08\n\x04HDF5\x10\x00\x12\x0f\n\x0b\x42INARYPROTO\x10\x01\"\x1e\n\nSolverMode\x12\x07\n\x03\x43PU\x10\x00\x12\x07\n\x03GPU\x10\x01\"U\n\nSolverType\x12\x07\n\x03SGD\x10\x00\x12\x0c\n\x08NESTEROV\x10\x01\x12\x0b\n\x07\x41\x44\x41GRAD\x10\x02\x12\x0b\n\x07RMSPROP\x10\x03\x12\x0c\n\x08\x41\x44\x41\x44\x45LTA\x10\x04\x12\x08\n\x04\x41\x44\x41M\x10\x05\"l\n\x0bSolverState\x12\x0c\n\x04iter\x18\x01 \x01(\x05\x12\x13\n\x0blearned_net\x18\x02 \x01(\t\x12!\n\x07history\x18\x03 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x17\n\x0c\x63urrent_step\x18\x04 
\x01(\x05:\x01\x30\"N\n\x08NetState\x12!\n\x05phase\x18\x01 \x01(\x0e\x32\x0c.caffe.Phase:\x04TEST\x12\x10\n\x05level\x18\x02 \x01(\x05:\x01\x30\x12\r\n\x05stage\x18\x03 \x03(\t\"s\n\x0cNetStateRule\x12\x1b\n\x05phase\x18\x01 \x01(\x0e\x32\x0c.caffe.Phase\x12\x11\n\tmin_level\x18\x02 \x01(\x05\x12\x11\n\tmax_level\x18\x03 \x01(\x05\x12\r\n\x05stage\x18\x04 \x03(\t\x12\x11\n\tnot_stage\x18\x05 \x03(\t\"\xa3\x01\n\tParamSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\nshare_mode\x18\x02 \x01(\x0e\x32\x1d.caffe.ParamSpec.DimCheckMode\x12\x12\n\x07lr_mult\x18\x03 \x01(\x02:\x01\x31\x12\x15\n\ndecay_mult\x18\x04 \x01(\x02:\x01\x31\"*\n\x0c\x44imCheckMode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"\xbe\x15\n\x0eLayerParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0e\n\x06\x62ottom\x18\x03 \x03(\t\x12\x0b\n\x03top\x18\x04 \x03(\t\x12\x1b\n\x05phase\x18\n \x01(\x0e\x32\x0c.caffe.Phase\x12\x13\n\x0bloss_weight\x18\x05 \x03(\x02\x12\x1f\n\x05param\x18\x06 \x03(\x0b\x32\x10.caffe.ParamSpec\x12\x1f\n\x05\x62lobs\x18\x07 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x16\n\x0epropagate_down\x18\x0b \x03(\x08\x12$\n\x07include\x18\x08 \x03(\x0b\x32\x13.caffe.NetStateRule\x12$\n\x07\x65xclude\x18\t \x03(\x0b\x32\x13.caffe.NetStateRule\x12\x37\n\x0ftransform_param\x18\x64 \x01(\x0b\x32\x1e.caffe.TransformationParameter\x12(\n\nloss_param\x18\x65 \x01(\x0b\x32\x14.caffe.LossParameter\x12\x30\n\x0e\x61\x63\x63uracy_param\x18\x66 \x01(\x0b\x32\x18.caffe.AccuracyParameter\x12,\n\x0c\x61rgmax_param\x18g \x01(\x0b\x32\x16.caffe.ArgMaxParameter\x12\x34\n\x10\x62\x61tch_norm_param\x18\x8b\x01 \x01(\x0b\x32\x19.caffe.BatchNormParameter\x12)\n\nbias_param\x18\x8d\x01 \x01(\x0b\x32\x14.caffe.BiasParameter\x12)\n\nclip_param\x18\x94\x01 \x01(\x0b\x32\x14.caffe.ClipParameter\x12,\n\x0c\x63oncat_param\x18h \x01(\x0b\x32\x16.caffe.ConcatParameter\x12?\n\x16\x63ontrastive_loss_param\x18i 
\x01(\x0b\x32\x1f.caffe.ContrastiveLossParameter\x12\x36\n\x11\x63onvolution_param\x18j \x01(\x0b\x32\x1b.caffe.ConvolutionParameter\x12)\n\ncrop_param\x18\x90\x01 \x01(\x0b\x32\x14.caffe.CropParameter\x12(\n\ndata_param\x18k \x01(\x0b\x32\x14.caffe.DataParameter\x12.\n\rdropout_param\x18l \x01(\x0b\x32\x17.caffe.DropoutParameter\x12\x33\n\x10\x64ummy_data_param\x18m \x01(\x0b\x32\x19.caffe.DummyDataParameter\x12.\n\reltwise_param\x18n \x01(\x0b\x32\x17.caffe.EltwiseParameter\x12\'\n\telu_param\x18\x8c\x01 \x01(\x0b\x32\x13.caffe.ELUParameter\x12+\n\x0b\x65mbed_param\x18\x89\x01 \x01(\x0b\x32\x15.caffe.EmbedParameter\x12&\n\texp_param\x18o \x01(\x0b\x32\x13.caffe.ExpParameter\x12/\n\rflatten_param\x18\x87\x01 \x01(\x0b\x32\x17.caffe.FlattenParameter\x12\x31\n\x0fhdf5_data_param\x18p \x01(\x0b\x32\x18.caffe.HDF5DataParameter\x12\x35\n\x11hdf5_output_param\x18q \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\x12\x33\n\x10hinge_loss_param\x18r \x01(\x0b\x32\x19.caffe.HingeLossParameter\x12\x33\n\x10image_data_param\x18s \x01(\x0b\x32\x19.caffe.ImageDataParameter\x12\x39\n\x13infogain_loss_param\x18t \x01(\x0b\x32\x1c.caffe.InfogainLossParameter\x12\x39\n\x13inner_product_param\x18u \x01(\x0b\x32\x1c.caffe.InnerProductParameter\x12+\n\x0binput_param\x18\x8f\x01 \x01(\x0b\x32\x15.caffe.InputParameter\x12\'\n\tlog_param\x18\x86\x01 \x01(\x0b\x32\x13.caffe.LogParameter\x12&\n\tlrn_param\x18v \x01(\x0b\x32\x13.caffe.LRNParameter\x12\x35\n\x11memory_data_param\x18w \x01(\x0b\x32\x1a.caffe.MemoryDataParameter\x12&\n\tmvn_param\x18x \x01(\x0b\x32\x13.caffe.MVNParameter\x12\x33\n\x0fparameter_param\x18\x91\x01 \x01(\x0b\x32\x19.caffe.ParameterParameter\x12.\n\rpooling_param\x18y \x01(\x0b\x32\x17.caffe.PoolingParameter\x12*\n\x0bpower_param\x18z \x01(\x0b\x32\x15.caffe.PowerParameter\x12+\n\x0bprelu_param\x18\x83\x01 \x01(\x0b\x32\x15.caffe.PReLUParameter\x12-\n\x0cpython_param\x18\x82\x01 \x01(\x0b\x32\x16.caffe.PythonParameter\x12\x33\n\x0frecurrent_param\x18\x92\x01 
\x01(\x0b\x32\x19.caffe.RecurrentParameter\x12\x33\n\x0freduction_param\x18\x88\x01 \x01(\x0b\x32\x19.caffe.ReductionParameter\x12(\n\nrelu_param\x18{ \x01(\x0b\x32\x14.caffe.ReLUParameter\x12/\n\rreshape_param\x18\x85\x01 \x01(\x0b\x32\x17.caffe.ReshapeParameter\x12+\n\x0bscale_param\x18\x8e\x01 \x01(\x0b\x32\x15.caffe.ScaleParameter\x12.\n\rsigmoid_param\x18| \x01(\x0b\x32\x17.caffe.SigmoidParameter\x12.\n\rsoftmax_param\x18} \x01(\x0b\x32\x17.caffe.SoftmaxParameter\x12\'\n\tspp_param\x18\x84\x01 \x01(\x0b\x32\x13.caffe.SPPParameter\x12*\n\x0bslice_param\x18~ \x01(\x0b\x32\x15.caffe.SliceParameter\x12+\n\x0bswish_param\x18\x93\x01 \x01(\x0b\x32\x15.caffe.SwishParameter\x12(\n\ntanh_param\x18\x7f \x01(\x0b\x32\x14.caffe.TanHParameter\x12\x33\n\x0fthreshold_param\x18\x80\x01 \x01(\x0b\x32\x19.caffe.ThresholdParameter\x12)\n\ntile_param\x18\x8a\x01 \x01(\x0b\x32\x14.caffe.TileParameter\x12\x36\n\x11window_data_param\x18\x81\x01 \x01(\x0b\x32\x1a.caffe.WindowDataParameter\x12/\n\rpermute_param\x18\x96\x01 \x01(\x0b\x32\x17.caffe.PermuteParameter\x12\x31\n\x0eupsample_param\x18\x97\x01 \x01(\x0b\x32\x18.caffe.UpsampleParameter\"\xb6\x01\n\x17TransformationParameter\x12\x10\n\x05scale\x18\x01 \x01(\x02:\x01\x31\x12\x15\n\x06mirror\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x14\n\tcrop_size\x18\x03 \x01(\r:\x01\x30\x12\x11\n\tmean_file\x18\x04 \x01(\t\x12\x12\n\nmean_value\x18\x05 \x03(\x02\x12\x1a\n\x0b\x66orce_color\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\nforce_gray\x18\x07 \x01(\x08:\x05\x66\x61lse\"\xc2\x01\n\rLossParameter\x12\x14\n\x0cignore_label\x18\x01 \x01(\x05\x12\x44\n\rnormalization\x18\x03 \x01(\x0e\x32&.caffe.LossParameter.NormalizationMode:\x05VALID\x12\x11\n\tnormalize\x18\x02 \x01(\x08\"B\n\x11NormalizationMode\x12\x08\n\x04\x46ULL\x10\x00\x12\t\n\x05VALID\x10\x01\x12\x0e\n\nBATCH_SIZE\x10\x02\x12\x08\n\x04NONE\x10\x03\"L\n\x11\x41\x63\x63uracyParameter\x12\x10\n\x05top_k\x18\x01 \x01(\r:\x01\x31\x12\x0f\n\x04\x61xis\x18\x02 
\x01(\x05:\x01\x31\x12\x14\n\x0cignore_label\x18\x03 \x01(\x05\"M\n\x0f\x41rgMaxParameter\x12\x1a\n\x0bout_max_val\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x05top_k\x18\x02 \x01(\r:\x01\x31\x12\x0c\n\x04\x61xis\x18\x03 \x01(\x05\")\n\rClipParameter\x12\x0b\n\x03min\x18\x01 \x02(\x02\x12\x0b\n\x03max\x18\x02 \x02(\x02\"9\n\x0f\x43oncatParameter\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\x12\x15\n\nconcat_dim\x18\x01 \x01(\r:\x01\x31\"j\n\x12\x42\x61tchNormParameter\x12\x18\n\x10use_global_stats\x18\x01 \x01(\x08\x12&\n\x17moving_average_fraction\x18\x02 \x01(\x02:\x05\x30.999\x12\x12\n\x03\x65ps\x18\x03 \x01(\x02:\x05\x31\x65-05\"]\n\rBiasParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x13\n\x08num_axes\x18\x02 \x01(\x05:\x01\x31\x12&\n\x06\x66iller\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\"L\n\x18\x43ontrastiveLossParameter\x12\x11\n\x06margin\x18\x01 \x01(\x02:\x01\x31\x12\x1d\n\x0elegacy_version\x18\x02 \x01(\x08:\x05\x66\x61lse\"\xfc\x03\n\x14\x43onvolutionParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12\x0b\n\x03pad\x18\x03 \x03(\r\x12\x13\n\x0bkernel_size\x18\x04 \x03(\r\x12\x0e\n\x06stride\x18\x06 \x03(\r\x12\x10\n\x08\x64ilation\x18\x12 \x03(\r\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x10\n\x08kernel_h\x18\x0b \x01(\r\x12\x10\n\x08kernel_w\x18\x0c \x01(\r\x12\x10\n\x08stride_h\x18\r \x01(\r\x12\x10\n\x08stride_w\x18\x0e \x01(\r\x12\x10\n\x05group\x18\x05 \x01(\r:\x01\x31\x12-\n\rweight_filler\x18\x07 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x08 \x01(\x0b\x32\x16.caffe.FillerParameter\x12;\n\x06\x65ngine\x18\x0f \x01(\x0e\x32\".caffe.ConvolutionParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x10 \x01(\x05:\x01\x31\x12\x1e\n\x0f\x66orce_nd_im2col\x18\x11 
\x01(\x08:\x05\x66\x61lse\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"0\n\rCropParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x32\x12\x0e\n\x06offset\x18\x02 \x03(\r\"\xa4\x02\n\rDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x12\n\nbatch_size\x18\x04 \x01(\r\x12\x14\n\trand_skip\x18\x07 \x01(\r:\x01\x30\x12\x31\n\x07\x62\x61\x63kend\x18\x08 \x01(\x0e\x32\x17.caffe.DataParameter.DB:\x07LEVELDB\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13\x66orce_encoded_color\x18\t \x01(\x08:\x05\x66\x61lse\x12\x13\n\x08prefetch\x18\n \x01(\r:\x01\x34\"\x1b\n\x02\x44\x42\x12\x0b\n\x07LEVELDB\x10\x00\x12\x08\n\x04LMDB\x10\x01\".\n\x10\x44ropoutParameter\x12\x1a\n\rdropout_ratio\x18\x01 \x01(\x02:\x03\x30.5\"\xa0\x01\n\x12\x44ummyDataParameter\x12+\n\x0b\x64\x61ta_filler\x18\x01 \x03(\x0b\x32\x16.caffe.FillerParameter\x12\x1f\n\x05shape\x18\x06 \x03(\x0b\x32\x10.caffe.BlobShape\x12\x0b\n\x03num\x18\x02 \x03(\r\x12\x10\n\x08\x63hannels\x18\x03 \x03(\r\x12\x0e\n\x06height\x18\x04 \x03(\r\x12\r\n\x05width\x18\x05 \x03(\r\"\xa5\x01\n\x10\x45ltwiseParameter\x12\x39\n\toperation\x18\x01 \x01(\x0e\x32!.caffe.EltwiseParameter.EltwiseOp:\x03SUM\x12\r\n\x05\x63oeff\x18\x02 \x03(\x02\x12\x1e\n\x10stable_prod_grad\x18\x03 \x01(\x08:\x04true\"\'\n\tEltwiseOp\x12\x08\n\x04PROD\x10\x00\x12\x07\n\x03SUM\x10\x01\x12\x07\n\x03MAX\x10\x02\" \n\x0c\x45LUParameter\x12\x10\n\x05\x61lpha\x18\x01 \x01(\x02:\x01\x31\"\xac\x01\n\x0e\x45mbedParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x11\n\tinput_dim\x18\x02 \x01(\r\x12\x17\n\tbias_term\x18\x03 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\"D\n\x0c\x45xpParameter\x12\x10\n\x04\x62\x61se\x18\x01 
\x01(\x02:\x02-1\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"9\n\x10\x46lattenParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x14\n\x08\x65nd_axis\x18\x02 \x01(\x05:\x02-1\"O\n\x11HDF5DataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x12\n\nbatch_size\x18\x02 \x01(\r\x12\x16\n\x07shuffle\x18\x03 \x01(\x08:\x05\x66\x61lse\"(\n\x13HDF5OutputParameter\x12\x11\n\tfile_name\x18\x01 \x01(\t\"^\n\x12HingeLossParameter\x12\x30\n\x04norm\x18\x01 \x01(\x0e\x32\x1e.caffe.HingeLossParameter.Norm:\x02L1\"\x16\n\x04Norm\x12\x06\n\x02L1\x10\x01\x12\x06\n\x02L2\x10\x02\"\x97\x02\n\x12ImageDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x15\n\nbatch_size\x18\x04 \x01(\r:\x01\x31\x12\x14\n\trand_skip\x18\x07 \x01(\r:\x01\x30\x12\x16\n\x07shuffle\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x15\n\nnew_height\x18\t \x01(\r:\x01\x30\x12\x14\n\tnew_width\x18\n \x01(\r:\x01\x30\x12\x16\n\x08is_color\x18\x0b \x01(\x08:\x04true\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x15\n\x0broot_folder\x18\x0c \x01(\t:\x00\"8\n\x15InfogainLossParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\"\xcb\x01\n\x15InnerProductParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0f\n\x04\x61xis\x18\x05 \x01(\x05:\x01\x31\x12\x18\n\ttranspose\x18\x06 \x01(\x08:\x05\x66\x61lse\"1\n\x0eInputParameter\x12\x1f\n\x05shape\x18\x01 \x03(\x0b\x32\x10.caffe.BlobShape\"D\n\x0cLogParameter\x12\x10\n\x04\x62\x61se\x18\x01 \x01(\x02:\x02-1\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"\xb8\x02\n\x0cLRNParameter\x12\x15\n\nlocal_size\x18\x01 
\x01(\r:\x01\x35\x12\x10\n\x05\x61lpha\x18\x02 \x01(\x02:\x01\x31\x12\x12\n\x04\x62\x65ta\x18\x03 \x01(\x02:\x04\x30.75\x12\x44\n\x0bnorm_region\x18\x04 \x01(\x0e\x32\x1e.caffe.LRNParameter.NormRegion:\x0f\x41\x43ROSS_CHANNELS\x12\x0c\n\x01k\x18\x05 \x01(\x02:\x01\x31\x12\x33\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x1a.caffe.LRNParameter.Engine:\x07\x44\x45\x46\x41ULT\"5\n\nNormRegion\x12\x13\n\x0f\x41\x43ROSS_CHANNELS\x10\x00\x12\x12\n\x0eWITHIN_CHANNEL\x10\x01\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"Z\n\x13MemoryDataParameter\x12\x12\n\nbatch_size\x18\x01 \x01(\r\x12\x10\n\x08\x63hannels\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\r\n\x05width\x18\x04 \x01(\r\"d\n\x0cMVNParameter\x12 \n\x12normalize_variance\x18\x01 \x01(\x08:\x04true\x12\x1e\n\x0f\x61\x63ross_channels\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x12\n\x03\x65ps\x18\x03 \x01(\x02:\x05\x31\x65-09\"5\n\x12ParameterParameter\x12\x1f\n\x05shape\x18\x01 \x01(\x0b\x32\x10.caffe.BlobShape\"\x81\x04\n\x10PoolingParameter\x12\x35\n\x04pool\x18\x01 \x01(\x0e\x32\".caffe.PoolingParameter.PoolMethod:\x03MAX\x12\x0e\n\x03pad\x18\x04 \x01(\r:\x01\x30\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x13\n\x0bkernel_size\x18\x02 \x01(\r\x12\x10\n\x08kernel_h\x18\x05 \x01(\r\x12\x10\n\x08kernel_w\x18\x06 \x01(\r\x12\x11\n\x06stride\x18\x03 \x01(\r:\x01\x31\x12\x10\n\x08stride_h\x18\x07 \x01(\r\x12\x10\n\x08stride_w\x18\x08 \x01(\r\x12\x37\n\x06\x65ngine\x18\x0b \x01(\x0e\x32\x1e.caffe.PoolingParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x1d\n\x0eglobal_pooling\x18\x0c \x01(\x08:\x05\x66\x61lse\x12;\n\nround_mode\x18\r 
\x01(\x0e\x32!.caffe.PoolingParameter.RoundMode:\x04\x43\x45IL\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\" \n\tRoundMode\x12\x08\n\x04\x43\x45IL\x10\x00\x12\t\n\x05\x46LOOR\x10\x01\"F\n\x0ePowerParameter\x12\x10\n\x05power\x18\x01 \x01(\x02:\x01\x31\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"g\n\x0fPythonParameter\x12\x0e\n\x06module\x18\x01 \x01(\t\x12\r\n\x05layer\x18\x02 \x01(\t\x12\x13\n\tparam_str\x18\x03 \x01(\t:\x00\x12 \n\x11share_in_parallel\x18\x04 \x01(\x08:\x05\x66\x61lse\"\xc0\x01\n\x12RecurrentParameter\x12\x15\n\nnum_output\x18\x01 \x01(\r:\x01\x30\x12-\n\rweight_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x19\n\ndebug_info\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\rexpose_hidden\x18\x05 \x01(\x08:\x05\x66\x61lse\"\xad\x01\n\x12ReductionParameter\x12=\n\toperation\x18\x01 \x01(\x0e\x32%.caffe.ReductionParameter.ReductionOp:\x03SUM\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x30\x12\x10\n\x05\x63oeff\x18\x03 \x01(\x02:\x01\x31\"5\n\x0bReductionOp\x12\x07\n\x03SUM\x10\x01\x12\x08\n\x04\x41SUM\x10\x02\x12\t\n\x05SUMSQ\x10\x03\x12\x08\n\x04MEAN\x10\x04\"\x8d\x01\n\rReLUParameter\x12\x19\n\x0enegative_slope\x18\x01 \x01(\x02:\x01\x30\x12\x34\n\x06\x65ngine\x18\x02 \x01(\x0e\x32\x1b.caffe.ReLUParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"Z\n\x10ReshapeParameter\x12\x1f\n\x05shape\x18\x01 \x01(\x0b\x32\x10.caffe.BlobShape\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x30\x12\x14\n\x08num_axes\x18\x03 \x01(\x05:\x02-1\"\xa5\x01\n\x0eScaleParameter\x12\x0f\n\x04\x61xis\x18\x01 
\x01(\x05:\x01\x31\x12\x13\n\x08num_axes\x18\x02 \x01(\x05:\x01\x31\x12&\n\x06\x66iller\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x18\n\tbias_term\x18\x04 \x01(\x08:\x05\x66\x61lse\x12+\n\x0b\x62ias_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\"x\n\x10SigmoidParameter\x12\x37\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1e.caffe.SigmoidParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"L\n\x0eSliceParameter\x12\x0f\n\x04\x61xis\x18\x03 \x01(\x05:\x01\x31\x12\x13\n\x0bslice_point\x18\x02 \x03(\r\x12\x14\n\tslice_dim\x18\x01 \x01(\r:\x01\x31\"\x89\x01\n\x10SoftmaxParameter\x12\x37\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1e.caffe.SoftmaxParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"!\n\x0eSwishParameter\x12\x0f\n\x04\x62\x65ta\x18\x01 \x01(\x02:\x01\x31\"r\n\rTanHParameter\x12\x34\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1b.caffe.TanHParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"/\n\rTileParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\r\n\x05tiles\x18\x02 \x01(\x05\"*\n\x12ThresholdParameter\x12\x14\n\tthreshold\x18\x01 \x01(\x02:\x01\x30\"\xc1\x02\n\x13WindowDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x12\n\nbatch_size\x18\x04 \x01(\r\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\x0c\x66g_threshold\x18\x07 \x01(\x02:\x03\x30.5\x12\x19\n\x0c\x62g_threshold\x18\x08 \x01(\x02:\x03\x30.5\x12\x19\n\x0b\x66g_fraction\x18\t \x01(\x02:\x04\x30.25\x12\x16\n\x0b\x63ontext_pad\x18\n 
\x01(\r:\x01\x30\x12\x17\n\tcrop_mode\x18\x0b \x01(\t:\x04warp\x12\x1b\n\x0c\x63\x61\x63he_images\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x15\n\x0broot_folder\x18\r \x01(\t:\x00\"\xeb\x01\n\x0cSPPParameter\x12\x16\n\x0epyramid_height\x18\x01 \x01(\r\x12\x31\n\x04pool\x18\x02 \x01(\x0e\x32\x1e.caffe.SPPParameter.PoolMethod:\x03MAX\x12\x33\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x1a.caffe.SPPParameter.Engine:\x07\x44\x45\x46\x41ULT\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"\xe0\x13\n\x10V1LayerParameter\x12\x0e\n\x06\x62ottom\x18\x02 \x03(\t\x12\x0b\n\x03top\x18\x03 \x03(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12$\n\x07include\x18 \x03(\x0b\x32\x13.caffe.NetStateRule\x12$\n\x07\x65xclude\x18! \x03(\x0b\x32\x13.caffe.NetStateRule\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.caffe.V1LayerParameter.LayerType\x12\x1f\n\x05\x62lobs\x18\x06 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x0e\n\x05param\x18\xe9\x07 \x03(\t\x12>\n\x0f\x62lob_share_mode\x18\xea\x07 \x03(\x0e\x32$.caffe.V1LayerParameter.DimCheckMode\x12\x10\n\x08\x62lobs_lr\x18\x07 \x03(\x02\x12\x14\n\x0cweight_decay\x18\x08 \x03(\x02\x12\x13\n\x0bloss_weight\x18# \x03(\x02\x12\x30\n\x0e\x61\x63\x63uracy_param\x18\x1b \x01(\x0b\x32\x18.caffe.AccuracyParameter\x12,\n\x0c\x61rgmax_param\x18\x17 \x01(\x0b\x32\x16.caffe.ArgMaxParameter\x12,\n\x0c\x63oncat_param\x18\t \x01(\x0b\x32\x16.caffe.ConcatParameter\x12?\n\x16\x63ontrastive_loss_param\x18( \x01(\x0b\x32\x1f.caffe.ContrastiveLossParameter\x12\x36\n\x11\x63onvolution_param\x18\n \x01(\x0b\x32\x1b.caffe.ConvolutionParameter\x12(\n\ndata_param\x18\x0b \x01(\x0b\x32\x14.caffe.DataParameter\x12.\n\rdropout_param\x18\x0c \x01(\x0b\x32\x17.caffe.DropoutParameter\x12\x33\n\x10\x64ummy_data_param\x18\x1a \x01(\x0b\x32\x19.caffe.DummyDataParameter\x12.\n\reltwise_param\x18\x18 
\x01(\x0b\x32\x17.caffe.EltwiseParameter\x12&\n\texp_param\x18) \x01(\x0b\x32\x13.caffe.ExpParameter\x12\x31\n\x0fhdf5_data_param\x18\r \x01(\x0b\x32\x18.caffe.HDF5DataParameter\x12\x35\n\x11hdf5_output_param\x18\x0e \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\x12\x33\n\x10hinge_loss_param\x18\x1d \x01(\x0b\x32\x19.caffe.HingeLossParameter\x12\x33\n\x10image_data_param\x18\x0f \x01(\x0b\x32\x19.caffe.ImageDataParameter\x12\x39\n\x13infogain_loss_param\x18\x10 \x01(\x0b\x32\x1c.caffe.InfogainLossParameter\x12\x39\n\x13inner_product_param\x18\x11 \x01(\x0b\x32\x1c.caffe.InnerProductParameter\x12&\n\tlrn_param\x18\x12 \x01(\x0b\x32\x13.caffe.LRNParameter\x12\x35\n\x11memory_data_param\x18\x16 \x01(\x0b\x32\x1a.caffe.MemoryDataParameter\x12&\n\tmvn_param\x18\" \x01(\x0b\x32\x13.caffe.MVNParameter\x12.\n\rpooling_param\x18\x13 \x01(\x0b\x32\x17.caffe.PoolingParameter\x12*\n\x0bpower_param\x18\x15 \x01(\x0b\x32\x15.caffe.PowerParameter\x12(\n\nrelu_param\x18\x1e \x01(\x0b\x32\x14.caffe.ReLUParameter\x12.\n\rsigmoid_param\x18& \x01(\x0b\x32\x17.caffe.SigmoidParameter\x12.\n\rsoftmax_param\x18\' \x01(\x0b\x32\x17.caffe.SoftmaxParameter\x12*\n\x0bslice_param\x18\x1f \x01(\x0b\x32\x15.caffe.SliceParameter\x12(\n\ntanh_param\x18% \x01(\x0b\x32\x14.caffe.TanHParameter\x12\x32\n\x0fthreshold_param\x18\x19 \x01(\x0b\x32\x19.caffe.ThresholdParameter\x12\x35\n\x11window_data_param\x18\x14 \x01(\x0b\x32\x1a.caffe.WindowDataParameter\x12\x37\n\x0ftransform_param\x18$ \x01(\x0b\x32\x1e.caffe.TransformationParameter\x12(\n\nloss_param\x18* \x01(\x0b\x32\x14.caffe.LossParameter\x12&\n\x05layer\x18\x01 
\x01(\x0b\x32\x17.caffe.V0LayerParameter\"\xd8\x04\n\tLayerType\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06\x41\x42SVAL\x10#\x12\x0c\n\x08\x41\x43\x43URACY\x10\x01\x12\n\n\x06\x41RGMAX\x10\x1e\x12\x08\n\x04\x42NLL\x10\x02\x12\n\n\x06\x43ONCAT\x10\x03\x12\x14\n\x10\x43ONTRASTIVE_LOSS\x10%\x12\x0f\n\x0b\x43ONVOLUTION\x10\x04\x12\x08\n\x04\x44\x41TA\x10\x05\x12\x11\n\rDECONVOLUTION\x10\'\x12\x0b\n\x07\x44ROPOUT\x10\x06\x12\x0e\n\nDUMMY_DATA\x10 \x12\x12\n\x0e\x45UCLIDEAN_LOSS\x10\x07\x12\x0b\n\x07\x45LTWISE\x10\x19\x12\x07\n\x03\x45XP\x10&\x12\x0b\n\x07\x46LATTEN\x10\x08\x12\r\n\tHDF5_DATA\x10\t\x12\x0f\n\x0bHDF5_OUTPUT\x10\n\x12\x0e\n\nHINGE_LOSS\x10\x1c\x12\n\n\x06IM2COL\x10\x0b\x12\x0e\n\nIMAGE_DATA\x10\x0c\x12\x11\n\rINFOGAIN_LOSS\x10\r\x12\x11\n\rINNER_PRODUCT\x10\x0e\x12\x07\n\x03LRN\x10\x0f\x12\x0f\n\x0bMEMORY_DATA\x10\x1d\x12\x1d\n\x19MULTINOMIAL_LOGISTIC_LOSS\x10\x10\x12\x07\n\x03MVN\x10\"\x12\x0b\n\x07POOLING\x10\x11\x12\t\n\x05POWER\x10\x1a\x12\x08\n\x04RELU\x10\x12\x12\x0b\n\x07SIGMOID\x10\x13\x12\x1e\n\x1aSIGMOID_CROSS_ENTROPY_LOSS\x10\x1b\x12\x0b\n\x07SILENCE\x10$\x12\x0b\n\x07SOFTMAX\x10\x14\x12\x10\n\x0cSOFTMAX_LOSS\x10\x15\x12\t\n\x05SPLIT\x10\x16\x12\t\n\x05SLICE\x10!\x12\x08\n\x04TANH\x10\x17\x12\x0f\n\x0bWINDOW_DATA\x10\x18\x12\r\n\tTHRESHOLD\x10\x1f\"*\n\x0c\x44imCheckMode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"\xfd\x07\n\x10V0LayerParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x12\n\nnum_output\x18\x03 \x01(\r\x12\x16\n\x08\x62iasterm\x18\x04 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x06 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0e\n\x03pad\x18\x07 \x01(\r:\x01\x30\x12\x12\n\nkernelsize\x18\x08 \x01(\r\x12\x10\n\x05group\x18\t \x01(\r:\x01\x31\x12\x11\n\x06stride\x18\n \x01(\r:\x01\x31\x12\x35\n\x04pool\x18\x0b \x01(\x0e\x32\".caffe.V0LayerParameter.PoolMethod:\x03MAX\x12\x1a\n\rdropout_ratio\x18\x0c 
\x01(\x02:\x03\x30.5\x12\x15\n\nlocal_size\x18\r \x01(\r:\x01\x35\x12\x10\n\x05\x61lpha\x18\x0e \x01(\x02:\x01\x31\x12\x12\n\x04\x62\x65ta\x18\x0f \x01(\x02:\x04\x30.75\x12\x0c\n\x01k\x18\x16 \x01(\x02:\x01\x31\x12\x0e\n\x06source\x18\x10 \x01(\t\x12\x10\n\x05scale\x18\x11 \x01(\x02:\x01\x31\x12\x10\n\x08meanfile\x18\x12 \x01(\t\x12\x11\n\tbatchsize\x18\x13 \x01(\r\x12\x13\n\x08\x63ropsize\x18\x14 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x05\x62lobs\x18\x32 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x10\n\x08\x62lobs_lr\x18\x33 \x03(\x02\x12\x14\n\x0cweight_decay\x18\x34 \x03(\x02\x12\x14\n\trand_skip\x18\x35 \x01(\r:\x01\x30\x12\x1d\n\x10\x64\x65t_fg_threshold\x18\x36 \x01(\x02:\x03\x30.5\x12\x1d\n\x10\x64\x65t_bg_threshold\x18\x37 \x01(\x02:\x03\x30.5\x12\x1d\n\x0f\x64\x65t_fg_fraction\x18\x38 \x01(\x02:\x04\x30.25\x12\x1a\n\x0f\x64\x65t_context_pad\x18: \x01(\r:\x01\x30\x12\x1b\n\rdet_crop_mode\x18; \x01(\t:\x04warp\x12\x12\n\x07new_num\x18< \x01(\x05:\x01\x30\x12\x17\n\x0cnew_channels\x18= \x01(\x05:\x01\x30\x12\x15\n\nnew_height\x18> \x01(\x05:\x01\x30\x12\x14\n\tnew_width\x18? 
\x01(\x05:\x01\x30\x12\x1d\n\x0eshuffle_images\x18@ \x01(\x08:\x05\x66\x61lse\x12\x15\n\nconcat_dim\x18\x41 \x01(\r:\x01\x31\x12\x36\n\x11hdf5_output_param\x18\xe9\x07 \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"W\n\x0ePReLUParameter\x12&\n\x06\x66iller\x18\x01 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x1d\n\x0e\x63hannel_shared\x18\x02 \x01(\x08:\x05\x66\x61lse\"!\n\x10PermuteParameter\x12\r\n\x05order\x18\x01 \x03(\r\"\xd1\x01\n\x11UpsampleParameter\x12\x12\n\x06height\x18\x01 \x01(\x05:\x02\x33\x32\x12\x11\n\x05width\x18\x02 \x01(\x05:\x02\x33\x32\x12\x17\n\x0cheight_scale\x18\x03 \x01(\x05:\x01\x32\x12\x16\n\x0bwidth_scale\x18\x04 \x01(\x05:\x01\x32\x12;\n\x04mode\x18\x05 \x01(\x0e\x32#.caffe.UpsampleParameter.UpsampleOp:\x08\x42ILINEAR\"\'\n\nUpsampleOp\x12\x0b\n\x07NEAREST\x10\x00\x12\x0c\n\x08\x42ILINEAR\x10\x01*\x1c\n\x05Phase\x12\t\n\x05TRAIN\x10\x00\x12\x08\n\x04TEST\x10\x01')
_PHASE = DESCRIPTOR.enum_types_by_name['Phase']
Phase = enum_type_wrapper.EnumTypeWrapper(_PHASE)
TRAIN = 0
TEST = 1
# Bind every top-level message descriptor to a module-level alias of the form
# _<UPPERCASE_MESSAGE_NAME> (e.g. _BLOBSHAPE, _LAYERPARAMETER,
# _UPSAMPLEPARAMETER), exactly as protoc emits one assignment per message.
# Iterating message_types_by_name preserves .proto declaration order.
for _message_type in DESCRIPTOR.message_types_by_name.values():
  globals()['_' + _message_type.name.upper()] = _message_type
del _message_type
# Bind each message's nested enum descriptors to module-level aliases of the
# form _<MESSAGE>_<ENUM> (e.g. _SOLVERPARAMETER_SOLVERMODE,
# _UPSAMPLEPARAMETER_UPSAMPLEOP), matching the per-enum assignments protoc
# emits for every nested enum of every top-level message.
for _message_type in DESCRIPTOR.message_types_by_name.values():
  for _nested_enum in _message_type.enum_types_by_name.values():
    globals()['_%s_%s' % (_message_type.name.upper(), _nested_enum.name.upper())] = _nested_enum
del _message_type, _nested_enum
# Build one concrete Message subclass per top-level descriptor and register it
# with the default symbol database, exporting each class under its proto name
# (BlobShape, BlobProto, ..., UpsampleParameter).  This is the same work as
# the per-class boilerplate protoc emits; the inert per-class
# @@protoc_insertion_point(class_scope:caffe.<Name>) markers are not
# reproduced because nothing was ever inserted at them in this file.
for _message_type in DESCRIPTOR.message_types_by_name.values():
  _generated_class = _reflection.GeneratedProtocolMessageType(
      _message_type.name,
      (_message.Message,),
      {
          'DESCRIPTOR': _message_type,
          '__module__': 'caffe_pb2',
      })
  globals()[_message_type.name] = _generated_class
  _sym_db.RegisterMessage(_generated_class)
del _message_type, _generated_class
# Pure-Python descriptor branch: when the fast C++ descriptor implementation
# is not in use, protoc-generated code patches field options and the byte
# offsets of each message/enum within the serialized FileDescriptorProto.
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  # b'\020\001' is the serialized [packed = true] option for these repeated
  # numeric fields.
  _BLOBSHAPE.fields_by_name['dim']._options = None
  _BLOBSHAPE.fields_by_name['dim']._serialized_options = b'\020\001'
  _BLOBPROTO.fields_by_name['data']._options = None
  _BLOBPROTO.fields_by_name['data']._serialized_options = b'\020\001'
  _BLOBPROTO.fields_by_name['diff']._options = None
  _BLOBPROTO.fields_by_name['diff']._serialized_options = b'\020\001'
  _BLOBPROTO.fields_by_name['double_data']._options = None
  _BLOBPROTO.fields_by_name['double_data']._serialized_options = b'\020\001'
  _BLOBPROTO.fields_by_name['double_diff']._options = None
  _BLOBPROTO.fields_by_name['double_diff']._serialized_options = b'\020\001'
  # Start/end byte offsets of each type within the serialized file descriptor.
  # NOTE(review): several nested enums below (Engine, PoolMethod,
  # DimCheckMode) reuse the offsets of another message's identically-defined
  # enum (e.g. every Engine is 6966..7009); confirm against freshly
  # regenerated output before editing.
  _PHASE._serialized_start=16028
  _PHASE._serialized_end=16056
  _BLOBSHAPE._serialized_start=22
  _BLOBSHAPE._serialized_end=50
  _BLOBPROTO._serialized_start=53
  _BLOBPROTO._serialized_end=257
  _BLOBPROTOVECTOR._serialized_start=259
  _BLOBPROTOVECTOR._serialized_end=309
  _DATUM._serialized_start=312
  _DATUM._serialized_end=441
  _FILLERPARAMETER._serialized_start=444
  _FILLERPARAMETER._serialized_end=710
  _FILLERPARAMETER_VARIANCENORM._serialized_start=658
  _FILLERPARAMETER_VARIANCENORM._serialized_end=710
  _NETPARAMETER._serialized_start=713
  _NETPARAMETER._serialized_end=983
  _SOLVERPARAMETER._serialized_start=986
  _SOLVERPARAMETER._serialized_end=2350
  _SOLVERPARAMETER_SNAPSHOTFORMAT._serialized_start=2188
  _SOLVERPARAMETER_SNAPSHOTFORMAT._serialized_end=2231
  _SOLVERPARAMETER_SOLVERMODE._serialized_start=2233
  _SOLVERPARAMETER_SOLVERMODE._serialized_end=2263
  _SOLVERPARAMETER_SOLVERTYPE._serialized_start=2265
  _SOLVERPARAMETER_SOLVERTYPE._serialized_end=2350
  _SOLVERSTATE._serialized_start=2352
  _SOLVERSTATE._serialized_end=2460
  _NETSTATE._serialized_start=2462
  _NETSTATE._serialized_end=2540
  _NETSTATERULE._serialized_start=2542
  _NETSTATERULE._serialized_end=2657
  _PARAMSPEC._serialized_start=2660
  _PARAMSPEC._serialized_end=2823
  _PARAMSPEC_DIMCHECKMODE._serialized_start=2781
  _PARAMSPEC_DIMCHECKMODE._serialized_end=2823
  _LAYERPARAMETER._serialized_start=2826
  _LAYERPARAMETER._serialized_end=5576
  _TRANSFORMATIONPARAMETER._serialized_start=5579
  _TRANSFORMATIONPARAMETER._serialized_end=5761
  _LOSSPARAMETER._serialized_start=5764
  _LOSSPARAMETER._serialized_end=5958
  _LOSSPARAMETER_NORMALIZATIONMODE._serialized_start=5892
  _LOSSPARAMETER_NORMALIZATIONMODE._serialized_end=5958
  _ACCURACYPARAMETER._serialized_start=5960
  _ACCURACYPARAMETER._serialized_end=6036
  _ARGMAXPARAMETER._serialized_start=6038
  _ARGMAXPARAMETER._serialized_end=6115
  _CLIPPARAMETER._serialized_start=6117
  _CLIPPARAMETER._serialized_end=6158
  _CONCATPARAMETER._serialized_start=6160
  _CONCATPARAMETER._serialized_end=6217
  _BATCHNORMPARAMETER._serialized_start=6219
  _BATCHNORMPARAMETER._serialized_end=6325
  _BIASPARAMETER._serialized_start=6327
  _BIASPARAMETER._serialized_end=6420
  _CONTRASTIVELOSSPARAMETER._serialized_start=6422
  _CONTRASTIVELOSSPARAMETER._serialized_end=6498
  _CONVOLUTIONPARAMETER._serialized_start=6501
  _CONVOLUTIONPARAMETER._serialized_end=7009
  _CONVOLUTIONPARAMETER_ENGINE._serialized_start=6966
  _CONVOLUTIONPARAMETER_ENGINE._serialized_end=7009
  _CROPPARAMETER._serialized_start=7011
  _CROPPARAMETER._serialized_end=7059
  _DATAPARAMETER._serialized_start=7062
  _DATAPARAMETER._serialized_end=7354
  _DATAPARAMETER_DB._serialized_start=7327
  _DATAPARAMETER_DB._serialized_end=7354
  _DROPOUTPARAMETER._serialized_start=7356
  _DROPOUTPARAMETER._serialized_end=7402
  _DUMMYDATAPARAMETER._serialized_start=7405
  _DUMMYDATAPARAMETER._serialized_end=7565
  _ELTWISEPARAMETER._serialized_start=7568
  _ELTWISEPARAMETER._serialized_end=7733
  _ELTWISEPARAMETER_ELTWISEOP._serialized_start=7694
  _ELTWISEPARAMETER_ELTWISEOP._serialized_end=7733
  _ELUPARAMETER._serialized_start=7735
  _ELUPARAMETER._serialized_end=7767
  _EMBEDPARAMETER._serialized_start=7770
  _EMBEDPARAMETER._serialized_end=7942
  _EXPPARAMETER._serialized_start=7944
  _EXPPARAMETER._serialized_end=8012
  _FLATTENPARAMETER._serialized_start=8014
  _FLATTENPARAMETER._serialized_end=8071
  _HDF5DATAPARAMETER._serialized_start=8073
  _HDF5DATAPARAMETER._serialized_end=8152
  _HDF5OUTPUTPARAMETER._serialized_start=8154
  _HDF5OUTPUTPARAMETER._serialized_end=8194
  _HINGELOSSPARAMETER._serialized_start=8196
  _HINGELOSSPARAMETER._serialized_end=8290
  _HINGELOSSPARAMETER_NORM._serialized_start=8268
  _HINGELOSSPARAMETER_NORM._serialized_end=8290
  _IMAGEDATAPARAMETER._serialized_start=8293
  _IMAGEDATAPARAMETER._serialized_end=8572
  _INFOGAINLOSSPARAMETER._serialized_start=8574
  _INFOGAINLOSSPARAMETER._serialized_end=8630
  _INNERPRODUCTPARAMETER._serialized_start=8633
  _INNERPRODUCTPARAMETER._serialized_end=8836
  _INPUTPARAMETER._serialized_start=8838
  _INPUTPARAMETER._serialized_end=8887
  _LOGPARAMETER._serialized_start=8889
  _LOGPARAMETER._serialized_end=8957
  _LRNPARAMETER._serialized_start=8960
  _LRNPARAMETER._serialized_end=9272
  _LRNPARAMETER_NORMREGION._serialized_start=9174
  _LRNPARAMETER_NORMREGION._serialized_end=9227
  _LRNPARAMETER_ENGINE._serialized_start=6966
  _LRNPARAMETER_ENGINE._serialized_end=7009
  _MEMORYDATAPARAMETER._serialized_start=9274
  _MEMORYDATAPARAMETER._serialized_end=9364
  _MVNPARAMETER._serialized_start=9366
  _MVNPARAMETER._serialized_end=9466
  _PARAMETERPARAMETER._serialized_start=9468
  _PARAMETERPARAMETER._serialized_end=9521
  _POOLINGPARAMETER._serialized_start=9524
  _POOLINGPARAMETER._serialized_end=10037
  _POOLINGPARAMETER_POOLMETHOD._serialized_start=9912
  _POOLINGPARAMETER_POOLMETHOD._serialized_end=9958
  _POOLINGPARAMETER_ENGINE._serialized_start=6966
  _POOLINGPARAMETER_ENGINE._serialized_end=7009
  _POOLINGPARAMETER_ROUNDMODE._serialized_start=10005
  _POOLINGPARAMETER_ROUNDMODE._serialized_end=10037
  _POWERPARAMETER._serialized_start=10039
  _POWERPARAMETER._serialized_end=10109
  _PYTHONPARAMETER._serialized_start=10111
  _PYTHONPARAMETER._serialized_end=10214
  _RECURRENTPARAMETER._serialized_start=10217
  _RECURRENTPARAMETER._serialized_end=10409
  _REDUCTIONPARAMETER._serialized_start=10412
  _REDUCTIONPARAMETER._serialized_end=10585
  _REDUCTIONPARAMETER_REDUCTIONOP._serialized_start=10532
  _REDUCTIONPARAMETER_REDUCTIONOP._serialized_end=10585
  _RELUPARAMETER._serialized_start=10588
  _RELUPARAMETER._serialized_end=10729
  _RELUPARAMETER_ENGINE._serialized_start=6966
  _RELUPARAMETER_ENGINE._serialized_end=7009
  _RESHAPEPARAMETER._serialized_start=10731
  _RESHAPEPARAMETER._serialized_end=10821
  _SCALEPARAMETER._serialized_start=10824
  _SCALEPARAMETER._serialized_end=10989
  _SIGMOIDPARAMETER._serialized_start=10991
  _SIGMOIDPARAMETER._serialized_end=11111
  _SIGMOIDPARAMETER_ENGINE._serialized_start=6966
  _SIGMOIDPARAMETER_ENGINE._serialized_end=7009
  _SLICEPARAMETER._serialized_start=11113
  _SLICEPARAMETER._serialized_end=11189
  _SOFTMAXPARAMETER._serialized_start=11192
  _SOFTMAXPARAMETER._serialized_end=11329
  _SOFTMAXPARAMETER_ENGINE._serialized_start=6966
  _SOFTMAXPARAMETER_ENGINE._serialized_end=7009
  _SWISHPARAMETER._serialized_start=11331
  _SWISHPARAMETER._serialized_end=11364
  _TANHPARAMETER._serialized_start=11366
  _TANHPARAMETER._serialized_end=11480
  _TANHPARAMETER_ENGINE._serialized_start=6966
  _TANHPARAMETER_ENGINE._serialized_end=7009
  _TILEPARAMETER._serialized_start=11482
  _TILEPARAMETER._serialized_end=11529
  _THRESHOLDPARAMETER._serialized_start=11531
  _THRESHOLDPARAMETER._serialized_end=11573
  _WINDOWDATAPARAMETER._serialized_start=11576
  _WINDOWDATAPARAMETER._serialized_end=11897
  _SPPPARAMETER._serialized_start=11900
  _SPPPARAMETER._serialized_end=12135
  _SPPPARAMETER_POOLMETHOD._serialized_start=9912
  _SPPPARAMETER_POOLMETHOD._serialized_end=9958
  _SPPPARAMETER_ENGINE._serialized_start=6966
  _SPPPARAMETER_ENGINE._serialized_end=7009
  _V1LAYERPARAMETER._serialized_start=12138
  _V1LAYERPARAMETER._serialized_end=14666
  _V1LAYERPARAMETER_LAYERTYPE._serialized_start=14022
  _V1LAYERPARAMETER_LAYERTYPE._serialized_end=14622
  _V1LAYERPARAMETER_DIMCHECKMODE._serialized_start=2781
  _V1LAYERPARAMETER_DIMCHECKMODE._serialized_end=2823
  _V0LAYERPARAMETER._serialized_start=14669
  _V0LAYERPARAMETER._serialized_end=15690
  _V0LAYERPARAMETER_POOLMETHOD._serialized_start=9912
  _V0LAYERPARAMETER_POOLMETHOD._serialized_end=9958
  _PRELUPARAMETER._serialized_start=15692
  _PRELUPARAMETER._serialized_end=15779
  _PERMUTEPARAMETER._serialized_start=15781
  _PERMUTEPARAMETER._serialized_end=15814
  _UPSAMPLEPARAMETER._serialized_start=15817
  _UPSAMPLEPARAMETER._serialized_end=16026
  _UPSAMPLEPARAMETER_UPSAMPLEOP._serialized_start=15987
  _UPSAMPLEPARAMETER_UPSAMPLEOP._serialized_end=16026
# @@protoc_insertion_point(module_scope)
| 83.254545 | 30,057 | 0.800471 | 9,038 | 64,106 | 5.427418 | 0.101792 | 0.01003 | 0.019081 | 0.031802 | 0.416203 | 0.325994 | 0.277027 | 0.224696 | 0.20168 | 0.128922 | 0 | 0.147591 | 0.046345 | 64,106 | 769 | 30,058 | 83.362809 | 0.65478 | 0.064003 | 0 | 0.208333 | 1 | 0.072115 | 0.380256 | 0.305283 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.009615 | 0 | 0.009615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f51d2a0ec979865686042df3864ca787e25c4cd | 738 | py | Python | tests/CRAFT/MFW/lambdaV1.py | idaholab/SR2ML | 2aa5e0be02786523cdeaf898d42411a7068d30b7 | [
"Apache-2.0"
] | 5 | 2021-01-25T02:01:22.000Z | 2021-12-27T03:14:49.000Z | tests/CRAFT/MFW/lambdaV1.py | idaholab/SR2ML | 2aa5e0be02786523cdeaf898d42411a7068d30b7 | [
"Apache-2.0"
] | 32 | 2021-01-12T18:43:29.000Z | 2022-02-17T19:45:27.000Z | tests/CRAFT/testMC_timeDep/lambdaV1.py | idaholab/SR2ML | 2aa5e0be02786523cdeaf898d42411a7068d30b7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020, Battelle Energy Alliance, LLC
# ALL RIGHTS RESERVED
import numpy as np
import math
import random
from scipy.integrate import quad
def timeDepLambda(t,a,b):
  """Linear time-dependent failure rate lambda(t) = a + b*t.

  a is the base (time-zero) rate, b the aging slope.
  """
  return b * t + a
def pdfFailure(t,a,b):
first = timeDepLambda(t,a,b)
second = math.exp(-quad(timeDepLambda, 0, t, args=(a,b))[0])
return first*second
def run(self,Input):
# lambda(t) = a + t*b
# intput: a_V1, b_V1, T (max time)
# output: t_V1, p_V1
self.p_V1 = np.zeros(Input['time'].size)
for index,value in np.ndenumerate(Input['time']):
#self.p_V1[index[0]] = quad(pdfFailure, 0, value, args=(Input['a_V1'],Input['b_V1']))[0]
self.p_V1[index[0]] = 1. - math.exp(-quad(timeDepLambda, 0, value, args=(Input['a_V1'],Input['b_V1']))[0])
| 28.384615 | 110 | 0.666667 | 130 | 738 | 3.7 | 0.376923 | 0.016632 | 0.018711 | 0.066528 | 0.268191 | 0.112266 | 0.112266 | 0.112266 | 0.112266 | 0.112266 | 0 | 0.038339 | 0.151762 | 738 | 25 | 111 | 29.52 | 0.730032 | 0.303523 | 0 | 0 | 0 | 0 | 0.031558 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0.285714 | 0.071429 | 0.642857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
8f644f6734e040c6ba15713aebdfb20814872df6 | 140 | py | Python | submissions/abc058/b.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | 1 | 2021-05-10T01:16:28.000Z | 2021-05-10T01:16:28.000Z | submissions/abc058/b.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | 3 | 2021-05-11T06:14:15.000Z | 2021-06-19T08:18:36.000Z | submissions/abc058/b.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
] | null | null | null | o = input()
e = input()
ans = ''
for i in range(len(e)):
ans += o[i]
ans += e[i]
if len(o)-len(e) == 1:
ans += o[-1]
print(ans)
| 14 | 23 | 0.457143 | 28 | 140 | 2.285714 | 0.428571 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019802 | 0.278571 | 140 | 9 | 24 | 15.555556 | 0.613861 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.111111 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f66133575ba5eaf993451dbd859e1f29cf63dd0 | 1,637 | py | Python | src/datasets/imagenet_r.py | mmatena/wise-ft | 2630c366d252ad32db82ea886f7ab6a752142792 | [
"MIT"
] | 79 | 2021-10-01T22:29:51.000Z | 2022-03-30T04:19:58.000Z | src/datasets/imagenet_r.py | mmatena/wise-ft | 2630c366d252ad32db82ea886f7ab6a752142792 | [
"MIT"
] | 2 | 2021-11-18T19:50:59.000Z | 2022-01-08T00:57:24.000Z | src/datasets/imagenet_r.py | mmatena/wise-ft | 2630c366d252ad32db82ea886f7ab6a752142792 | [
"MIT"
] | 10 | 2021-10-14T18:29:59.000Z | 2022-03-27T12:40:18.000Z | import os
import torch
import torchvision.datasets as datasets
from .imagenet import ImageNetSubsample, ImageNetSubsampleValClasses
import numpy as np
CLASS_SUBLIST = [
1, 2, 4, 6, 8, 9, 11, 13, 22, 23, 26, 29, 31, 39, 47, 63, 71, 76, 79, 84, 90, 94, 96, 97, 99, 100, 105, 107,
113, 122,
125, 130, 132, 144, 145, 147, 148, 150, 151, 155, 160, 161, 162, 163, 171, 172, 178, 187, 195, 199, 203,
207, 208, 219,
231, 232, 234, 235, 242, 245, 247, 250, 251, 254, 259, 260, 263, 265, 267, 269, 276, 277, 281, 288, 289,
291, 292, 293,
296, 299, 301, 308, 309, 310, 311, 314, 315, 319, 323, 327, 330, 334, 335, 337, 338, 340, 341, 344, 347,
353, 355, 361,
362, 365, 366, 367, 368, 372, 388, 390, 393, 397, 401, 407, 413, 414, 425, 428, 430, 435, 437, 441, 447,
448, 457, 462,
463, 469, 470, 471, 472, 476, 483, 487, 515, 546, 555, 558, 570, 579, 583, 587, 593, 594, 596, 609, 613,
617, 621, 629,
637, 657, 658, 701, 717, 724, 763, 768, 774, 776, 779, 780, 787, 805, 812, 815, 820, 824, 833, 847, 852,
866, 875, 883,
889, 895, 907, 928, 931, 932, 933, 934, 936, 937, 943, 945, 947, 948, 949, 951, 953, 954, 957, 963, 965,
967, 980, 981,
983, 988]
CLASS_SUBLIST_MASK = [(i in CLASS_SUBLIST) for i in range(1000)]
class ImageNetRValClasses(ImageNetSubsampleValClasses):
def get_class_sublist_and_mask(self):
return CLASS_SUBLIST, CLASS_SUBLIST_MASK
class ImageNetR(ImageNetSubsample):
def get_class_sublist_and_mask(self):
return CLASS_SUBLIST, CLASS_SUBLIST_MASK
def get_test_path(self):
return os.path.join(self.location, 'imagenet-r') | 41.974359 | 112 | 0.635919 | 277 | 1,637 | 3.685921 | 0.837545 | 0.105779 | 0.047013 | 0.03526 | 0.123408 | 0.123408 | 0.123408 | 0.123408 | 0.123408 | 0.123408 | 0 | 0.448006 | 0.218693 | 1,637 | 39 | 113 | 41.974359 | 0.350274 | 0 | 0 | 0.125 | 0 | 0 | 0.006105 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09375 | false | 0 | 0.15625 | 0.09375 | 0.40625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8f6eca7563e2ddd3d0e7e1f65939a98d7d6facd2 | 598 | py | Python | tests/basic_test.py | LE-LOY/socialsims | b1eb0413d3ce52886e41679024974f5d7c1dc32f | [
"MIT"
] | null | null | null | tests/basic_test.py | LE-LOY/socialsims | b1eb0413d3ce52886e41679024974f5d7c1dc32f | [
"MIT"
] | null | null | null | tests/basic_test.py | LE-LOY/socialsims | b1eb0413d3ce52886e41679024974f5d7c1dc32f | [
"MIT"
] | null | null | null | # from ..sims.deprecated.cat_mouse import data_visualizer as cat_mouse_vis
# from ..sims.deprecated.route_choice import data_visualizer as route_choice_vis
# from ..sims.deprecated.simple_migration import data_visualizer as simple_mig_vis
"""
@pytest.mark.skip(reason="deprecated")
def test_cat_mouse_visualizer():
cat_mouse_vis.visualize(test=True)
@pytest.mark.skip(reason="deprecated")
def test_route_choice_visualizer():
route_choice_vis.visualize(test=True)
@pytest.mark.skip(reason="deprecated")
def test_simple_migration_visualizer():
simple_mig_vis.visualize(test=True)
""" | 31.473684 | 82 | 0.804348 | 84 | 598 | 5.404762 | 0.27381 | 0.070485 | 0.118943 | 0.145374 | 0.332599 | 0.332599 | 0.332599 | 0.251101 | 0.251101 | 0.251101 | 0 | 0 | 0.085284 | 598 | 19 | 83 | 31.473684 | 0.829982 | 0.976589 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
56bd7fdd6caf6376c1fc4fab1089d4ae75cad328 | 272 | py | Python | tests/test_mparser.py | Dih5/zadeh | 3cc0d2a4803a77d8d4d0a90c0012eea0397ee9ca | [
"MIT"
] | 2 | 2020-11-24T14:37:05.000Z | 2021-06-16T20:02:41.000Z | tests/test_mparser.py | Dih5/zadeh | 3cc0d2a4803a77d8d4d0a90c0012eea0397ee9ca | [
"MIT"
] | null | null | null | tests/test_mparser.py | Dih5/zadeh | 3cc0d2a4803a77d8d4d0a90c0012eea0397ee9ca | [
"MIT"
] | null | null | null | import zadeh
import os
def test_parse():
"""Load an external .fis file"""
fis = zadeh.FIS.from_matlab(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "tipper.fis"))
assert isinstance(fis.get_crisp_output({"food": 5, "service": 5}), float)
| 24.727273 | 111 | 0.680147 | 41 | 272 | 4.317073 | 0.682927 | 0.101695 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008547 | 0.139706 | 272 | 10 | 112 | 27.2 | 0.747863 | 0.095588 | 0 | 0 | 0 | 0 | 0.104167 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.2 | false | 0 | 0.4 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
56c407bfcd2ed5ebef40aab5ddeb784178957418 | 804 | py | Python | instructions/docker_build.py | gnubyte/publicServerAutomator | b26828519b72702119013061cdf881a4a353e9ed | [
"MIT"
] | null | null | null | instructions/docker_build.py | gnubyte/publicServerAutomator | b26828519b72702119013061cdf881a4a353e9ed | [
"MIT"
] | 1 | 2019-10-25T20:13:29.000Z | 2019-10-25T20:13:29.000Z | instructions/docker_build.py | gnubyte/publicServerAutomator | b26828519b72702119013061cdf881a4a353e9ed | [
"MIT"
] | null | null | null | # @Author: Patrick Hastings
# @Date 4-13-2018
# ----------------
import server
dockerInstructions = [
"apt-get update -y",
"apt-get install apt-transport-https -y",
"apt-get install software-properties-common -y",
"apt-get install curl -y",
"apt-get install gnupg2 -y",
"apt-get install git -y",
"apt-get install acl -y",
"apt-get install fail2ban -y"
'''add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/debian $(lsb_release -cs) stable"''',
"apt-get update -y",
"apt-get install docker-ce -y",
"docker run hello-world"
]
newDocker = server.Server(inputKeyPath="publickey.pem", inputKeyPassword='pass', inputServerIP="0.0.0.0" )
newDocker.set_commands(commandList=dockerInstructions)
newDocker.connect()
newDocker.run_commands() | 28.714286 | 115 | 0.672886 | 106 | 804 | 5.075472 | 0.518868 | 0.111524 | 0.104089 | 0.208178 | 0.096654 | 0.096654 | 0.096654 | 0 | 0 | 0 | 0 | 0.022091 | 0.155473 | 804 | 28 | 116 | 28.714286 | 0.77025 | 0.072139 | 0 | 0.105263 | 0 | 0 | 0.489731 | 0.041074 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.052632 | 0.052632 | 0 | 0.052632 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
56c5a8cf4cfd02c84dde21b4def9e05ee3a6e235 | 2,300 | py | Python | .github/workflows/scripts/check_java_version.py | cezary986/hello-github-actions | 3f178fbc32c8b60d598e2fb0361f31c356ea7b66 | [
"MIT"
] | null | null | null | .github/workflows/scripts/check_java_version.py | cezary986/hello-github-actions | 3f178fbc32c8b60d598e2fb0361f31c356ea7b66 | [
"MIT"
] | 3 | 2020-05-12T10:39:35.000Z | 2020-05-12T11:24:05.000Z | .github/workflows/scripts/check_java_version.py | cezary986/hello-github-actions | 3f178fbc32c8b60d598e2fb0361f31c356ea7b66 | [
"MIT"
] | null | null | null | import sys
import os
import re
import json
import base64
REFERENCE_BRANCH_NAME = 'master'
GRADLE_FILE_PATH = './build.gradle'
GET_GRADLE_COMMAND = f'git show {REFERENCE_BRANCH_NAME} HEAD:{GRADLE_FILE_PATH}'
class Version:
def __init__(self, version_string: str):
parts = version_string.split('.')
if len(parts) > 3:
raise Exception('Invalid version format')
else:
self._major = int(parts[0])
self._minor = int(parts[1])
self._patch = int(parts[2])
def major(self):
self._major += 1
def minor(self):
self._minor += 1
def patch(self):
self._patch += 1
def __eq__(self, other):
return (self.to_number() == other.to_number())
def __ne__(self, other):
return (self.to_number() != other.to_number())
def __lt__(self, other):
return (self.to_number() < other.to_number())
def __le__(self, other):
return (self.to_number() <= other.to_number())
def __gt__(self, other):
return (self.to_number() > other.to_number())
def __ge__(self, other):
return (self.to_number() >= other.to_number())
def to_number(self):
return (self._major * 100) + (self._minor * 10) + self._patch
def __str__(self):
return f'{self._major}.{self._minor}.{self._patch}'
def get_build_gradle_from_branch(branch_name: str) -> str:
return os.popen(GET_GRADLE_COMMAND).read()
def get_version_from_gradle(build_gradle_content: str) -> Version:
matches = re.findall(r"-*\+*version\s*=*\s*'\S+'\n", build_gradle_content)
print(matches)
max_version = None
for match in matches:
version_line = match
version_string = version_line.split("'")[1::2][0]
version = Version(version_string)
if max_version is None or version > max_version:
max_version = version
return max_version
if __name__ == "__main__":
build_gradle_file = open(GRADLE_FILE_PATH, "r")
my_build_gradle_content = build_gradle_file.read()
their_build_gradle_content = get_build_gradle_from_branch(REFERENCE_BRANCH_NAME)
my_version = get_version_from_gradle(my_build_gradle_content)
their_version = get_version_from_gradle(their_build_gradle_content)
if my_version <= their_version:
print(f'Local version ({str(my_version)}) lower or equal than their version ({str(their_version)}).')
exit(1)
exit(0) | 28.04878 | 105 | 0.70087 | 331 | 2,300 | 4.459215 | 0.250755 | 0.070461 | 0.060976 | 0.077236 | 0.243902 | 0.174797 | 0.174797 | 0.174797 | 0.174797 | 0.174797 | 0 | 0.009937 | 0.168696 | 2,300 | 82 | 106 | 28.04878 | 0.762029 | 0 | 0 | 0 | 0 | 0 | 0.116471 | 0.059539 | 0 | 0 | 0 | 0 | 0 | 1 | 0.225806 | false | 0 | 0.080645 | 0.145161 | 0.483871 | 0.032258 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
56c67af805accf44e8f758d5853f747ef0bec3dd | 87 | py | Python | Chapter 07/Chap07_Example7.96.py | bpbpublications/Programming-Techniques-using-Python | 49b785f37e95a3aad1d36cef51e219ac56e5e9f0 | [
"MIT"
] | null | null | null | Chapter 07/Chap07_Example7.96.py | bpbpublications/Programming-Techniques-using-Python | 49b785f37e95a3aad1d36cef51e219ac56e5e9f0 | [
"MIT"
] | null | null | null | Chapter 07/Chap07_Example7.96.py | bpbpublications/Programming-Techniques-using-Python | 49b785f37e95a3aad1d36cef51e219ac56e5e9f0 | [
"MIT"
] | null | null | null | myfruits = {"Apple", "Banana", "Grapes", "Litchi", "Mango"}
myfruits.remove('Guava')
| 29 | 60 | 0.632184 | 9 | 87 | 6.111111 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114943 | 87 | 2 | 61 | 43.5 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0.388235 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
56cf63f6be5f6939930d8a50e929022411806fe2 | 5,051 | py | Python | userbot/plugins/Gban.py | techyminati/DeOXy | 014efbf6ba4ba31525f996e935279e8918c8ba96 | [
"Apache-2.0"
] | 1 | 2020-08-27T12:54:28.000Z | 2020-08-27T12:54:28.000Z | userbot/plugins/Gban.py | techyminati/DeOXy | 014efbf6ba4ba31525f996e935279e8918c8ba96 | [
"Apache-2.0"
] | null | null | null | userbot/plugins/Gban.py | techyminati/DeOXy | 014efbf6ba4ba31525f996e935279e8918c8ba96 | [
"Apache-2.0"
] | 6 | 2020-08-17T16:11:18.000Z | 2020-11-03T16:06:46.000Z | #Imported to DeOXy
from userbot import bot, BOTLOG_CHATID, ALIVE_NAME, CMD_LIST
import asyncio
from telethon import events
from telethon.tl.functions.channels import EditBannedRequest
from telethon.tl.types import (PeerChat, PeerChannel,ChannelParticipantsAdmins, ChatAdminRights,ChatBannedRights, MessageEntityMentionName,MessageMediaPhoto, ChannelParticipantsBots)
from telethon.tl.types import Channel
from telethon.tl.functions.contacts import BlockRequest, UnblockRequest
client = telebot = bot
from telethon.tl.functions.messages import GetCommonChatsRequest
ALIVE_NAME = str(ALIVE_NAME)
from telethon.events import ChatAction
# Imported from @javes05
# Kangers keep the credits -_-
@command(outgoing=True, pattern="^.gban(?: |$)(.*)")
async def startgban(tb):
oof = tb ; sender = await oof.get_sender() ; me = await oof.client.get_me()
if not sender.id == me.id:
tele = await oof.reply("`Processing...`")
else:
tele = await oof.edit("`Processing...`")
me = await tb.client.get_me() ; await tele.edit(f"`{ALIVE_NAME}:` **Globally Banning user!**") ; my_mention = "[{}](tg://user?id={})".format(me.first_name, me.id) ; my_username = f"@{me.username}" if me.username else my_mention ; chat = await tb.get_chat() ; a = b = 0
if tb.is_private:
user = tb.chat ; reason = tb.pattern_match.group(1) ; chat_title = 'PM'
else:
chat_title = tb.chat.title
try:
user, reason = await get_user_from_event(tb)
except:
pass
try:
if not reason:
reason = 'Private'
except:
return await tele.edit(f"`{ALIVE_NAME}:`**User not found.**")
if user:
if user.id == 767014786:
return await tele.edit(f"`{ALIVE_NAME}:`**DeOXy MASTER: Denied.**")
try:
from userbot.modules.sql_helper.gmute_sql import gmute
except:
pass
try:
await tb.client(BlockRequest(user))
block = 'True'
except:
pass
testtb = [d.entity.id for d in await tb.client.get_dialogs() if (d.is_group or d.is_channel) ]
for i in testtb:
try:
await tb.client.edit_permissions(i, user, view_messages=False)
a += 1
await tele.edit(f"`{ALIVE_NAME}:` **Global Banning User!\nGbanned {a} chats.....**")
except:
b += 1
else:
await tele.edit(f"`{ALIVE_NAME}:` **Reply to a user !! **")
try:
if gmute(user.id) is False:
return await tele.edit(f"`{ALIVE_NAME}:`**DeOXy MATER: User already Gbanned**")
except:
pass
return await tele.edit(f"`{ALIVE_NAME}:` **Gbanned [{user.first_name}](tg://user?id={user.id}) in {a} chat(s) **")
@command(outgoing=True, pattern="^;ungban(?: |$)(.*)")
async def regressgban(tb):
oof = tb ; sender = await oof.get_sender() ; me = await oof.client.get_me()
if not sender.id == me.id:
tele = await oof.reply("`Processing...`")
else:
tele = await oof.edit("`processing...`")
me = await tb.client.get_me() ; await tele.edit(f"`{ALIVE_NAME}:` **Requesting to UnGban user!**") ; my_mention = "[{}](tg://user?id={})".format(me.first_name, me.id) ; my_username = f"@{me.username}" if me.username else my_mention ; chat = await tb.get_chat() ; a = b = 0
if tb.is_private:
user = tb.chat ; reason = tb.pattern_match.group(1) ; chat_title = 'PM'
else:
chat_title = tb.chat.title
try:
user, reason = await get_user_from_event(tb)
except:
pass
try:
if not reason:
reason = 'Private'
except:
return await tele.edit(f"`{ALIVE_NAME}:`**DeOXy MASTER: User not found. Invalid argument**")
if user:
if user.id == 767014786:
return await tele.edit(f"`{ALIVE_NAME}:`**DeOXy MASTER: Denied.**")
try:
from userbot.modules.sql_helper.gmute_sql import ungmute
except:
pass
try:
await tb.client(UnblockRequest(user))
block = 'True'
except:
pass
testtb = [d.entity.id for d in await tb.client.get_dialogs() if (d.is_group or d.is_channel) ]
for i in testtb:
try:
await tb.client.edit_permissions(i, user, send_messages=True)
a += 1
await tele.edit(f"`{ALIVE_NAME}:` **Requesting to ungban user!\nunGbanned {a} chats.....**")
except:
b += 1
else:
await tele.edit(f"`{ALIVE_NAME}:` **DeOXy MASTER: User not found, Reply to a user**")
try:
if ungmute(user.id) is False:
return await tele.edit(f"`{ALIVE_NAME}:`**DeOXy MASTER: Invalid argument, Already Gbanned**")
except:
pass
return await tele.edit(f"`{ALIVE_NAME}:` **UnGbanned [{user.first_name}](tg://user?id={user.id}) in {a} chat(s) **")
| 42.445378 | 276 | 0.586814 | 644 | 5,051 | 4.5 | 0.200311 | 0.052795 | 0.062802 | 0.067633 | 0.713596 | 0.696342 | 0.666667 | 0.666667 | 0.65735 | 0.65528 | 0 | 0.007659 | 0.276183 | 5,051 | 118 | 277 | 42.805085 | 0.785011 | 0.013661 | 0 | 0.66055 | 0 | 0.018349 | 0.199839 | 0.052018 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.073395 | 0.100917 | 0 | 0.174312 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
56eb43911a555b4c47f0e76fd588f8f5e30f1599 | 3,648 | py | Python | src/micropython/microbit/__model/compass.py | julianrendell/vscode-python-devicesimulator | 8014a940c9a0551793bfb5694bff9a52df6c0155 | [
"MIT"
] | 151 | 2019-11-05T10:10:29.000Z | 2022-02-18T11:46:27.000Z | src/micropython/microbit/__model/compass.py | julianrendell/vscode-python-devicesimulator | 8014a940c9a0551793bfb5694bff9a52df6c0155 | [
"MIT"
] | 98 | 2019-11-09T17:41:30.000Z | 2021-12-17T23:05:01.000Z | src/micropython/microbit/__model/compass.py | julianrendell/vscode-python-devicesimulator | 8014a940c9a0551793bfb5694bff9a52df6c0155 | [
"MIT"
] | 42 | 2019-11-10T02:26:27.000Z | 2022-03-22T01:43:01.000Z | from common import utils
from common.telemetry import telemetry_py
from common.telemetry_events import TelemetryEvent
class Compass:
# The implementation is based off of https://microbit-micropython.readthedocs.io/en/v1.0.1/compass.html.
def calibrate(self):
"""
This function is not implemented in the simulator.
Starts the calibration process. When this function is called on the physical device, an instructive message will be scrolled to the user after which they will need to rotate the device in order to draw a circle on the LED display on the actual device.
"""
utils.print_for_unimplemented_functions(Compass.calibrate.__name__)
telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_COMPASS)
def is_calibrated(self):
"""
This function is not implemented in the simulator.
Returns ``True`` if the compass has been successfully calibrated, and
returns ``False`` otherwise.
"""
utils.print_for_unimplemented_functions(Compass.is_calibrated.__name__)
telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_COMPASS)
def clear_calibration(self):
"""
This function is not implemented in the simulator.
Undoes the calibration, making the compass uncalibrated again.
"""
utils.print_for_unimplemented_functions(Compass.clear_calibration.__name__)
telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_COMPASS)
def get_x(self):
"""
This function is not implemented in the simulator.
Gives the reading of the magnetic field strength on the ``x`` axis in nano
tesla, as a positive or negative integer, depending on the direction of the
field.
"""
utils.print_for_unimplemented_functions(Compass.get_x.__name__)
telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_COMPASS)
def get_y(self):
"""
This function is not implemented in the simulator.
Gives the reading of the magnetic field strength on the ``y`` axis in nano
tesla, as a positive or negative integer, depending on the direction of the
field.
"""
utils.print_for_unimplemented_functions(Compass.get_y.__name__)
telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_COMPASS)
def get_z(self):
"""
This function is not implemented in the simulator.
Gives the reading of the magnetic field strength on the ``z`` axis in nano
tesla, as a positive or negative integer, depending on the direction of the
field.
"""
utils.print_for_unimplemented_functions(Compass.get_z.__name__)
telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_COMPASS)
def heading(self):
"""
This function is not implemented in the simulator.
Gives the compass heading, calculated from the above readings, as an
integer in the range from 0 to 360, representing the angle in degrees,
clockwise, with north as 0.
"""
utils.print_for_unimplemented_functions(Compass.heading.__name__)
telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_COMPASS)
def get_field_strength(self):
"""
This function is not implemented in the simulator.
Returns an integer indication of the magnitude of the magnetic field around
the device in nano tesla.
"""
utils.print_for_unimplemented_functions(Compass.get_field_strength.__name__)
telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_COMPASS)
| 40.988764 | 259 | 0.705044 | 461 | 3,648 | 5.353579 | 0.258134 | 0.040113 | 0.051053 | 0.058347 | 0.655186 | 0.655186 | 0.587115 | 0.568882 | 0.568882 | 0.507293 | 0 | 0.002874 | 0.236842 | 3,648 | 88 | 260 | 41.454545 | 0.883621 | 0.459704 | 0 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0.607143 | 0.107143 | 0 | 0.428571 | 0.285714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
56ef5b761db8737bcaf9e921790f38bfdd5880da | 258 | py | Python | posts/tests.py | Code-Institute-Submissions/Dermomurphy-MS4-DJ-ecomSite_MayResub | b1925d1b75e777be31b658698025f6aa963d97b0 | [
"MIT"
] | null | null | null | posts/tests.py | Code-Institute-Submissions/Dermomurphy-MS4-DJ-ecomSite_MayResub | b1925d1b75e777be31b658698025f6aa963d97b0 | [
"MIT"
] | null | null | null | posts/tests.py | Code-Institute-Submissions/Dermomurphy-MS4-DJ-ecomSite_MayResub | b1925d1b75e777be31b658698025f6aa963d97b0 | [
"MIT"
] | 1 | 2021-02-28T12:00:41.000Z | 2021-02-28T12:00:41.000Z | from django.test import TestCase
from posts.models import Posts
from datetime import datetime
# Create your tests here.
class PostTestCase(TestCase):
def setUp(self):
Posts.objects.create(title='Test1', body='Test text',created_on=datetime.now)
| 28.666667 | 85 | 0.763566 | 36 | 258 | 5.444444 | 0.694444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004525 | 0.143411 | 258 | 8 | 86 | 32.25 | 0.882353 | 0.089147 | 0 | 0 | 0 | 0 | 0.060086 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.5 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
56f4e87901fdbfcd99ffadd07ef472398a26666d | 28,995 | py | Python | pysnmp-with-texts/CISCO-UNIFIED-COMPUTING-SYSFILE-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/CISCO-UNIFIED-COMPUTING-SYSFILE-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/CISCO-UNIFIED-COMPUTING-SYSFILE-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-UNIFIED-COMPUTING-SYSFILE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-UNIFIED-COMPUTING-SYSFILE-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:17:44 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "ConstraintsUnion")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
CiscoAlarmSeverity, TimeIntervalSec, Unsigned64, CiscoInetAddressMask, CiscoNetworkAddress = mibBuilder.importSymbols("CISCO-TC", "CiscoAlarmSeverity", "TimeIntervalSec", "Unsigned64", "CiscoInetAddressMask", "CiscoNetworkAddress")
ciscoUnifiedComputingMIBObjects, CucsManagedObjectId, CucsManagedObjectDn = mibBuilder.importSymbols("CISCO-UNIFIED-COMPUTING-MIB", "ciscoUnifiedComputingMIBObjects", "CucsManagedObjectId", "CucsManagedObjectDn")
CucsFsmFsmStageStatus, CucsSysfileMutationFsmStageName, CucsFsmFlags, CucsConditionRemoteInvRslt, CucsSysfileMutationAction, CucsSysfileMutationFsmTaskItem, CucsFsmCompletion, CucsNetworkSwitchId, CucsSysfileMutationFsmCurrentFsm = mibBuilder.importSymbols("CISCO-UNIFIED-COMPUTING-TC-MIB", "CucsFsmFsmStageStatus", "CucsSysfileMutationFsmStageName", "CucsFsmFlags", "CucsConditionRemoteInvRslt", "CucsSysfileMutationAction", "CucsSysfileMutationFsmTaskItem", "CucsFsmCompletion", "CucsNetworkSwitchId", "CucsSysfileMutationFsmCurrentFsm")
InetAddressIPv6, InetAddressIPv4 = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressIPv6", "InetAddressIPv4")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Gauge32, Unsigned32, iso, ModuleIdentity, MibIdentifier, IpAddress, ObjectIdentity, Counter32, Integer32, Counter64, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Unsigned32", "iso", "ModuleIdentity", "MibIdentifier", "IpAddress", "ObjectIdentity", "Counter32", "Integer32", "Counter64", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "NotificationType")
TimeStamp, TextualConvention, MacAddress, DisplayString, RowPointer, TimeInterval, TruthValue, DateAndTime = mibBuilder.importSymbols("SNMPv2-TC", "TimeStamp", "TextualConvention", "MacAddress", "DisplayString", "RowPointer", "TimeInterval", "TruthValue", "DateAndTime")
cucsSysfileObjects = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48))
if mibBuilder.loadTexts: cucsSysfileObjects.setLastUpdated('201601180000Z')
if mibBuilder.loadTexts: cucsSysfileObjects.setOrganization('Cisco Systems Inc.')
if mibBuilder.loadTexts: cucsSysfileObjects.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553 -NETS E-mail: cs-san@cisco.com, cs-lan-switch-snmp@cisco.com')
if mibBuilder.loadTexts: cucsSysfileObjects.setDescription('MIB representation of the Cisco Unified Computing System SYSFILE management information model package')
cucsSysfileDigestTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3), )
if mibBuilder.loadTexts: cucsSysfileDigestTable.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestTable.setDescription('Cisco UCS sysfile:Digest managed object table')
cucsSysfileDigestEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-SYSFILE-MIB", "cucsSysfileDigestInstanceId"))
if mibBuilder.loadTexts: cucsSysfileDigestEntry.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestEntry.setDescription('Entry for the cucsSysfileDigestTable table.')
cucsSysfileDigestInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsSysfileDigestInstanceId.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestInstanceId.setDescription('Instance identifier of the managed object.')
cucsSysfileDigestDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestDn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestDn.setDescription('Cisco UCS sysfile:Digest:dn managed object property')
cucsSysfileDigestRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestRn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestRn.setDescription('Cisco UCS sysfile:Digest:rn managed object property')
cucsSysfileDigestCreationTS = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 4), Unsigned64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestCreationTS.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestCreationTS.setDescription('Cisco UCS sysfile:Digest:creationTS managed object property')
cucsSysfileDigestDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestDescr.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestDescr.setDescription('Cisco UCS sysfile:Digest:descr managed object property')
cucsSysfileDigestName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestName.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestName.setDescription('Cisco UCS sysfile:Digest:name managed object property')
cucsSysfileDigestSize = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestSize.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestSize.setDescription('Cisco UCS sysfile:Digest:size managed object property')
cucsSysfileDigestSource = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestSource.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestSource.setDescription('Cisco UCS sysfile:Digest:source managed object property')
cucsSysfileDigestSwitchId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 9), CucsNetworkSwitchId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestSwitchId.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestSwitchId.setDescription('Cisco UCS sysfile:Digest:switchId managed object property')
cucsSysfileDigestTs = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 10), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestTs.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestTs.setDescription('Cisco UCS sysfile:Digest:ts managed object property')
cucsSysfileDigestUri = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 3, 1, 11), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileDigestUri.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileDigestUri.setDescription('Cisco UCS sysfile:Digest:uri managed object property')
cucsSysfileMutationTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1), )
if mibBuilder.loadTexts: cucsSysfileMutationTable.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationTable.setDescription('Cisco UCS sysfile:Mutation managed object table')
cucsSysfileMutationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-SYSFILE-MIB", "cucsSysfileMutationInstanceId"))
if mibBuilder.loadTexts: cucsSysfileMutationEntry.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationEntry.setDescription('Entry for the cucsSysfileMutationTable table.')
cucsSysfileMutationInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsSysfileMutationInstanceId.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationInstanceId.setDescription('Instance identifier of the managed object.')
cucsSysfileMutationDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationDn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationDn.setDescription('Cisco UCS sysfile:Mutation:dn managed object property')
cucsSysfileMutationRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationRn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationRn.setDescription('Cisco UCS sysfile:Mutation:rn managed object property')
cucsSysfileMutationAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 4), CucsSysfileMutationAction()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationAction.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationAction.setDescription('Cisco UCS sysfile:Mutation:action managed object property')
cucsSysfileMutationDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationDescr.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationDescr.setDescription('Cisco UCS sysfile:Mutation:descr managed object property')
cucsSysfileMutationFsmDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmDescr.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmDescr.setDescription('Cisco UCS sysfile:Mutation:fsmDescr managed object property')
cucsSysfileMutationFsmPrev = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmPrev.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmPrev.setDescription('Cisco UCS sysfile:Mutation:fsmPrev managed object property')
cucsSysfileMutationFsmProgr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmProgr.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmProgr.setDescription('Cisco UCS sysfile:Mutation:fsmProgr managed object property')
cucsSysfileMutationFsmRmtInvErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtInvErrCode.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtInvErrCode.setDescription('Cisco UCS sysfile:Mutation:fsmRmtInvErrCode managed object property')
cucsSysfileMutationFsmRmtInvErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtInvErrDescr.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtInvErrDescr.setDescription('Cisco UCS sysfile:Mutation:fsmRmtInvErrDescr managed object property')
cucsSysfileMutationFsmRmtInvRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtInvRslt.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtInvRslt.setDescription('Cisco UCS sysfile:Mutation:fsmRmtInvRslt managed object property')
cucsSysfileMutationFsmStageDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageDescr.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageDescr.setDescription('Cisco UCS sysfile:Mutation:fsmStageDescr managed object property')
cucsSysfileMutationFsmStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 13), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStamp.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStamp.setDescription('Cisco UCS sysfile:Mutation:fsmStamp managed object property')
cucsSysfileMutationFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 14), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStatus.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStatus.setDescription('Cisco UCS sysfile:Mutation:fsmStatus managed object property')
cucsSysfileMutationFsmTry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 1, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmTry.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTry.setDescription('Cisco UCS sysfile:Mutation:fsmTry managed object property')
cucsSysfileMutationFsmTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4), )
if mibBuilder.loadTexts: cucsSysfileMutationFsmTable.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTable.setDescription('Cisco UCS sysfile:MutationFsm managed object table')
cucsSysfileMutationFsmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-SYSFILE-MIB", "cucsSysfileMutationFsmInstanceId"))
if mibBuilder.loadTexts: cucsSysfileMutationFsmEntry.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmEntry.setDescription('Entry for the cucsSysfileMutationFsmTable table.')
cucsSysfileMutationFsmInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsSysfileMutationFsmInstanceId.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmInstanceId.setDescription('Instance identifier of the managed object.')
cucsSysfileMutationFsmDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmDn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmDn.setDescription('Cisco UCS sysfile:MutationFsm:dn managed object property')
cucsSysfileMutationFsmRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmRn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmRn.setDescription('Cisco UCS sysfile:MutationFsm:rn managed object property')
cucsSysfileMutationFsmCompletionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmCompletionTime.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmCompletionTime.setDescription('Cisco UCS sysfile:MutationFsm:completionTime managed object property')
cucsSysfileMutationFsmCurrentFsm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 5), CucsSysfileMutationFsmCurrentFsm()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmCurrentFsm.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmCurrentFsm.setDescription('Cisco UCS sysfile:MutationFsm:currentFsm managed object property')
cucsSysfileMutationFsmDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmDescrData.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmDescrData.setDescription('Cisco UCS sysfile:MutationFsm:descr managed object property')
cucsSysfileMutationFsmFsmStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 7), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmFsmStatus.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmFsmStatus.setDescription('Cisco UCS sysfile:MutationFsm:fsmStatus managed object property')
cucsSysfileMutationFsmProgress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmProgress.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmProgress.setDescription('Cisco UCS sysfile:MutationFsm:progress managed object property')
cucsSysfileMutationFsmRmtErrCode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtErrCode.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtErrCode.setDescription('Cisco UCS sysfile:MutationFsm:rmtErrCode managed object property')
cucsSysfileMutationFsmRmtErrDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtErrDescr.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtErrDescr.setDescription('Cisco UCS sysfile:MutationFsm:rmtErrDescr managed object property')
cucsSysfileMutationFsmRmtRslt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 4, 1, 11), CucsConditionRemoteInvRslt()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtRslt.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmRmtRslt.setDescription('Cisco UCS sysfile:MutationFsm:rmtRslt managed object property')
cucsSysfileMutationFsmStageTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5), )
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageTable.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageTable.setDescription('Cisco UCS sysfile:MutationFsmStage managed object table')
cucsSysfileMutationFsmStageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-SYSFILE-MIB", "cucsSysfileMutationFsmStageInstanceId"))
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageEntry.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageEntry.setDescription('Entry for the cucsSysfileMutationFsmStageTable table.')
cucsSysfileMutationFsmStageInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageInstanceId.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageInstanceId.setDescription('Instance identifier of the managed object.')
cucsSysfileMutationFsmStageDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageDn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageDn.setDescription('Cisco UCS sysfile:MutationFsmStage:dn managed object property')
cucsSysfileMutationFsmStageRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageRn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageRn.setDescription('Cisco UCS sysfile:MutationFsmStage:rn managed object property')
cucsSysfileMutationFsmStageDescrData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 4), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageDescrData.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageDescrData.setDescription('Cisco UCS sysfile:MutationFsmStage:descr managed object property')
cucsSysfileMutationFsmStageLastUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 5), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageLastUpdateTime.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageLastUpdateTime.setDescription('Cisco UCS sysfile:MutationFsmStage:lastUpdateTime managed object property')
cucsSysfileMutationFsmStageName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 6), CucsSysfileMutationFsmStageName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageName.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageName.setDescription('Cisco UCS sysfile:MutationFsmStage:name managed object property')
cucsSysfileMutationFsmStageOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageOrder.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageOrder.setDescription('Cisco UCS sysfile:MutationFsmStage:order managed object property')
cucsSysfileMutationFsmStageRetry = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageRetry.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageRetry.setDescription('Cisco UCS sysfile:MutationFsmStage:retry managed object property')
cucsSysfileMutationFsmStageStageStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 5, 1, 9), CucsFsmFsmStageStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageStageStatus.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmStageStageStatus.setDescription('Cisco UCS sysfile:MutationFsmStage:stageStatus managed object property')
cucsSysfileMutationFsmTaskTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2), )
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskTable.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskTable.setDescription('Cisco UCS sysfile:MutationFsmTask managed object table')
cucsSysfileMutationFsmTaskEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2, 1), ).setIndexNames((0, "CISCO-UNIFIED-COMPUTING-SYSFILE-MIB", "cucsSysfileMutationFsmTaskInstanceId"))
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskEntry.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskEntry.setDescription('Entry for the cucsSysfileMutationFsmTaskTable table.')
cucsSysfileMutationFsmTaskInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2, 1, 1), CucsManagedObjectId())
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskInstanceId.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskInstanceId.setDescription('Instance identifier of the managed object.')
cucsSysfileMutationFsmTaskDn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2, 1, 2), CucsManagedObjectDn()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskDn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskDn.setDescription('Cisco UCS sysfile:MutationFsmTask:dn managed object property')
cucsSysfileMutationFsmTaskRn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2, 1, 3), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskRn.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskRn.setDescription('Cisco UCS sysfile:MutationFsmTask:rn managed object property')
cucsSysfileMutationFsmTaskCompletion = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2, 1, 4), CucsFsmCompletion()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskCompletion.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskCompletion.setDescription('Cisco UCS sysfile:MutationFsmTask:completion managed object property')
cucsSysfileMutationFsmTaskFlags = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2, 1, 5), CucsFsmFlags()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskFlags.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskFlags.setDescription('Cisco UCS sysfile:MutationFsmTask:flags managed object property')
cucsSysfileMutationFsmTaskItem = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2, 1, 6), CucsSysfileMutationFsmTaskItem()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskItem.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskItem.setDescription('Cisco UCS sysfile:MutationFsmTask:item managed object property')
cucsSysfileMutationFsmTaskSeqId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 719, 1, 48, 2, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskSeqId.setStatus('current')
if mibBuilder.loadTexts: cucsSysfileMutationFsmTaskSeqId.setDescription('Cisco UCS sysfile:MutationFsmTask:seqId managed object property')
mibBuilder.exportSymbols("CISCO-UNIFIED-COMPUTING-SYSFILE-MIB", cucsSysfileMutationFsmCurrentFsm=cucsSysfileMutationFsmCurrentFsm, cucsSysfileDigestName=cucsSysfileDigestName, cucsSysfileMutationFsmStageTable=cucsSysfileMutationFsmStageTable, cucsSysfileDigestUri=cucsSysfileDigestUri, cucsSysfileMutationFsmTaskInstanceId=cucsSysfileMutationFsmTaskInstanceId, cucsSysfileMutationFsmDn=cucsSysfileMutationFsmDn, cucsSysfileMutationFsmTaskItem=cucsSysfileMutationFsmTaskItem, cucsSysfileMutationFsmTaskCompletion=cucsSysfileMutationFsmTaskCompletion, cucsSysfileMutationFsmRmtErrDescr=cucsSysfileMutationFsmRmtErrDescr, cucsSysfileMutationFsmStageInstanceId=cucsSysfileMutationFsmStageInstanceId, cucsSysfileMutationFsmRmtInvRslt=cucsSysfileMutationFsmRmtInvRslt, cucsSysfileMutationFsmDescr=cucsSysfileMutationFsmDescr, cucsSysfileMutationFsmDescrData=cucsSysfileMutationFsmDescrData, cucsSysfileMutationFsmEntry=cucsSysfileMutationFsmEntry, cucsSysfileDigestSwitchId=cucsSysfileDigestSwitchId, cucsSysfileMutationFsmRn=cucsSysfileMutationFsmRn, cucsSysfileMutationEntry=cucsSysfileMutationEntry, cucsSysfileDigestSource=cucsSysfileDigestSource, cucsSysfileMutationFsmStatus=cucsSysfileMutationFsmStatus, cucsSysfileObjects=cucsSysfileObjects, cucsSysfileMutationFsmTaskFlags=cucsSysfileMutationFsmTaskFlags, cucsSysfileDigestTable=cucsSysfileDigestTable, cucsSysfileMutationFsmStageRetry=cucsSysfileMutationFsmStageRetry, cucsSysfileMutationFsmTaskTable=cucsSysfileMutationFsmTaskTable, cucsSysfileMutationFsmTable=cucsSysfileMutationFsmTable, cucsSysfileDigestCreationTS=cucsSysfileDigestCreationTS, cucsSysfileMutationFsmRmtRslt=cucsSysfileMutationFsmRmtRslt, cucsSysfileMutationFsmStageDescrData=cucsSysfileMutationFsmStageDescrData, cucsSysfileMutationFsmProgr=cucsSysfileMutationFsmProgr, cucsSysfileMutationFsmTaskEntry=cucsSysfileMutationFsmTaskEntry, cucsSysfileMutationFsmInstanceId=cucsSysfileMutationFsmInstanceId, cucsSysfileMutationFsmStageDn=cucsSysfileMutationFsmStageDn, 
cucsSysfileDigestDescr=cucsSysfileDigestDescr, cucsSysfileMutationFsmCompletionTime=cucsSysfileMutationFsmCompletionTime, cucsSysfileDigestEntry=cucsSysfileDigestEntry, cucsSysfileMutationFsmRmtInvErrDescr=cucsSysfileMutationFsmRmtInvErrDescr, cucsSysfileDigestInstanceId=cucsSysfileDigestInstanceId, PYSNMP_MODULE_ID=cucsSysfileObjects, cucsSysfileDigestSize=cucsSysfileDigestSize, cucsSysfileMutationFsmFsmStatus=cucsSysfileMutationFsmFsmStatus, cucsSysfileMutationAction=cucsSysfileMutationAction, cucsSysfileMutationFsmRmtInvErrCode=cucsSysfileMutationFsmRmtInvErrCode, cucsSysfileMutationInstanceId=cucsSysfileMutationInstanceId, cucsSysfileDigestDn=cucsSysfileDigestDn, cucsSysfileMutationTable=cucsSysfileMutationTable, cucsSysfileMutationFsmStamp=cucsSysfileMutationFsmStamp, cucsSysfileMutationFsmProgress=cucsSysfileMutationFsmProgress, cucsSysfileMutationFsmStageLastUpdateTime=cucsSysfileMutationFsmStageLastUpdateTime, cucsSysfileMutationFsmStageEntry=cucsSysfileMutationFsmStageEntry, cucsSysfileMutationFsmStageName=cucsSysfileMutationFsmStageName, cucsSysfileMutationFsmStageStageStatus=cucsSysfileMutationFsmStageStageStatus, cucsSysfileMutationFsmTaskRn=cucsSysfileMutationFsmTaskRn, cucsSysfileMutationRn=cucsSysfileMutationRn, cucsSysfileMutationFsmTry=cucsSysfileMutationFsmTry, cucsSysfileMutationFsmTaskSeqId=cucsSysfileMutationFsmTaskSeqId, cucsSysfileMutationFsmStageRn=cucsSysfileMutationFsmStageRn, cucsSysfileMutationDescr=cucsSysfileMutationDescr, cucsSysfileMutationFsmPrev=cucsSysfileMutationFsmPrev, cucsSysfileMutationFsmStageOrder=cucsSysfileMutationFsmStageOrder, cucsSysfileMutationFsmTaskDn=cucsSysfileMutationFsmTaskDn, cucsSysfileMutationFsmStageDescr=cucsSysfileMutationFsmStageDescr, cucsSysfileDigestTs=cucsSysfileDigestTs, cucsSysfileMutationFsmRmtErrCode=cucsSysfileMutationFsmRmtErrCode, cucsSysfileMutationDn=cucsSysfileMutationDn, cucsSysfileDigestRn=cucsSysfileDigestRn)
| 134.860465 | 3,906 | 0.820038 | 2,806 | 28,995 | 8.472915 | 0.100855 | 0.065615 | 0.114827 | 0.010768 | 0.463176 | 0.240463 | 0.201725 | 0.191209 | 0.110032 | 0.092114 | 0 | 0.044854 | 0.07115 | 28,995 | 214 | 3,907 | 135.490654 | 0.837925 | 0.012554 | 0 | 0 | 0 | 0.004831 | 0.213088 | 0.068688 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.057971 | 0 | 0.057971 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
56f7a55e1ecb0ebcbf31f7444aa06591df34f007 | 2,125 | py | Python | app_control/migrations/0002_initial_app_control.py | bkstudy/bk_kb_1 | e053eec6be989456afa17cb635831f7fbc18c624 | [
"Apache-2.0"
] | 4 | 2021-05-19T02:28:01.000Z | 2021-12-14T04:02:22.000Z | app_control/migrations/0002_initial_app_control.py | bkstudy/bk_kb_1 | e053eec6be989456afa17cb635831f7fbc18c624 | [
"Apache-2.0"
] | 6 | 2019-12-18T10:09:29.000Z | 2021-06-10T22:24:50.000Z | app_control/migrations/0002_initial_app_control.py | bkstudy/bk_kb_1 | e053eec6be989456afa17cb635831f7fbc18c624 | [
"Apache-2.0"
] | 1 | 2018-10-11T07:49:19.000Z | 2018-10-11T07:49:19.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云(BlueKing) available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
"""
from __future__ import unicode_literals
from django.db import migrations
from django.core import serializers
def initial_app_control_data(apps, schema_editor):
    """Seed the FunctionController table with the default feature switches.

    Executed as a data migration via ``migrations.RunPython``. Seeding is
    best-effort: any error is printed and swallowed so the migration itself
    never fails.

    :param apps: Historical app registry supplied by Django (unused here).
    :param schema_editor: Schema editor supplied by Django (unused here).
    """
    try:
        # Default feature switches (func_code -> display name).
        func_data = [
            {'model': 'app_control.FunctionController', 'fields': {'func_code': 'func_test', 'func_name': u"示例功能"}},
            {'model': 'app_control.FunctionController', 'fields': {'func_code': 'create_task', 'func_name': u"创建任务"}},
            {'model': 'app_control.FunctionController', 'fields': {'func_code': 'execute_task', 'func_name': u"执行任务"}},
            {'model': 'app_control.FunctionController', 'fields': {'func_code': 'tasks', 'func_name': u"任务列表"}},
            {'model': 'app_control.FunctionController', 'fields': {'func_code': 'task', 'func_name': u"任务详情"}},
            {'model': 'app_control.FunctionController', 'fields': {'func_code': 'pause_task', 'func_name': u"任务暂停"}},
            {'model': 'app_control.FunctionController', 'fields': {'func_code': 'terminate_task', 'func_name': u"任务终止"}},
        ]
        func_obj = serializers.deserialize('python', func_data, ignorenonexistent=True)
        for obj in func_obj:
            obj.save()
    # BUGFIX: `except Exception, e:` is Python-2-only syntax; `as e` and
    # `print(e)` are valid on both Python 2.6+ and Python 3.
    except Exception as e:
        print(e)
class Migration(migrations.Migration):
    """Data migration: seeds the default FunctionController switches.

    Runs after the tables are created by ``0001_initial``.
    """
    dependencies = [
        ('app_control', '0001_initial'),
    ]
    operations = [
        # Forward-only seeding; no reverse function is provided, so this
        # migration cannot be unapplied cleanly.
        migrations.RunPython(initial_app_control_data),
    ]
]
| 46.195652 | 121 | 0.680471 | 263 | 2,125 | 5.330798 | 0.51711 | 0.071327 | 0.074893 | 0.164765 | 0.234665 | 0.234665 | 0.234665 | 0 | 0 | 0 | 0 | 0.006403 | 0.191529 | 2,125 | 45 | 122 | 47.222222 | 0.809662 | 0.014588 | 0 | 0 | 0 | 0 | 0.376231 | 0.147679 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.037037 | 0.111111 | null | null | 0.037037 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
56fb092688f5317d00254a637a7273416245b4d9 | 1,816 | py | Python | tests/test_location.py | enthought/supplement | 76090598cb55c80f9f4d4caad87344a8ac9d8e11 | [
"MIT"
] | 1 | 2017-02-15T18:30:44.000Z | 2017-02-15T18:30:44.000Z | tests/test_location.py | enthought/supplement | 76090598cb55c80f9f4d4caad87344a8ac9d8e11 | [
"MIT"
] | 3 | 2015-04-09T14:31:45.000Z | 2016-08-26T13:01:47.000Z | tests/test_location.py | enthought/supplement | 76090598cb55c80f9f4d4caad87344a8ac9d8e11 | [
"MIT"
] | null | null | null | from .helpers import pytest_funcarg__project
def test_function_node_location(project):
    """A module-level function is located on its `def` line."""
    mod = project.create_module('test', '''
def test():
    pass
''')
    loc_line, loc_file = mod['test'].get_location()
    assert loc_line == 1
    assert loc_file == 'test.py'
def test_assign_node_location(project):
    """Names bound by a multi-line tuple assignment report their own lines."""
    mod = project.create_module('test', '''
(test1,
 test2) = 5, 10
''')
    loc_line, loc_file = mod['test1'].get_location()
    assert loc_line == 2
    assert loc_file == 'test.py'

    loc_line, loc_file = mod['test2'].get_location()
    assert loc_line == 3
    assert loc_file == 'test.py'
def test_class_location(project):
    """A class definition is located on its `class` line."""
    mod = project.create_module('test', '''
class test:
    pass
''')
    loc_line, loc_file = mod['test'].get_location()
    assert loc_line == 2
    assert loc_file == 'test.py'
def test_method_location(project):
    """A method is located on its own `def` line inside the class."""
    mod = project.create_module('test', '''
class test:
    def test(self):
        pass
''')
    loc_line, loc_file = mod['test']['test'].get_location()
    assert loc_line == 3
    assert loc_file == 'test.py'
def test_imported_location(project):
    """An imported name resolves to its location in the defining module."""
    project.create_module('toimport', '''
test = 'test'
''')
    mod = project.create_module('test', '''
from toimport import test
''')
    loc_line, loc_file = mod['test'].get_location()
    assert loc_line == 1
    assert loc_file == 'toimport.py'
def test_super_method_location(project):
    """A method inherited from an imported base resolves to the base's module."""
    project.create_module('toimport', '''
class Foo(object):
    def foo(self):
        pass
''')
    mod = project.create_module('test', '''
from toimport import Foo
class Bar(Foo):
    pass
''')
    loc_line, loc_file = mod['Bar']['foo'].get_location()
    assert loc_line == 2
    assert loc_file == 'toimport.py'
7104c6048405b9052f215a66f26b258c85c0d654 | 914 | py | Python | src/CompartmentalSystems/bins/TsTpMassFieldsPerTimeStep.py | goujou/CompartmentalSystems | 4724555c33f11395ddc32738e8dfed7349ee155f | [
"MIT"
] | null | null | null | src/CompartmentalSystems/bins/TsTpMassFieldsPerTimeStep.py | goujou/CompartmentalSystems | 4724555c33f11395ddc32738e8dfed7349ee155f | [
"MIT"
] | null | null | null | src/CompartmentalSystems/bins/TsTpMassFieldsPerTimeStep.py | goujou/CompartmentalSystems | 4724555c33f11395ddc32738e8dfed7349ee155f | [
"MIT"
] | null | null | null | # vim: set ff=unix expandtab ts=4 sw=4:
import numpy as np
from .TsMassFieldsPerTimeStep import TsMassFieldsPerTimeStep
from .FieldsPerTimeStep import FieldsPerTimeStep
class TsTpMassFieldsPerTimeStep(FieldsPerTimeStep):
    """Sequence of TsTp mass fields, one entry per time step."""

    @property
    def total_contents(self):
        """Total mass content of each field, in time-step order."""
        return [fld.total_content for fld in self]

    def plot_total_contents(self, ax):
        """Plot total content against the stored time points on *ax*."""
        ax.plot(self.times, self.total_contents)

    def system_age_distributions(self):
        """Collapse pool ages in every field, yielding system-age fields."""
        collapsed = [fld.sum_over_all_pool_ages() for fld in self]
        return TsMassFieldsPerTimeStep(collapsed, self.start)

    def plot_system_age_distributions_with_bins(self, ax, mr=None, pool=None):
        """Plot the system-age distributions as binned curves on *ax*."""
        self.system_age_distributions().plot_bins(ax, mr, pool)

    def plot_system_age_distributions_as_surfaces(self, ax, mr=None, pool=None):
        """Plot the system-age distributions as a surface on *ax*."""
        self.system_age_distributions().plot_surface(ax, mr, pool)
| 35.153846 | 80 | 0.741794 | 115 | 914 | 5.66087 | 0.4 | 0.069124 | 0.168971 | 0.043011 | 0.242704 | 0.15361 | 0.15361 | 0.15361 | 0.15361 | 0.15361 | 0 | 0.002663 | 0.178337 | 914 | 25 | 81 | 36.56 | 0.864181 | 0.040481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.277778 | false | 0 | 0.166667 | 0.055556 | 0.611111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
71188757a4a3122d3041dcb62629fe9ef707fc95 | 75 | py | Python | abc/166/A.py | tonko2/AtCoder | 5d617072517881d226d7c8af09cb88684d41af7e | [
"Xnet",
"X11",
"CECILL-B"
] | 2 | 2022-01-22T07:56:58.000Z | 2022-01-24T00:29:37.000Z | abc/166/A.py | tonko2/AtCoder | 5d617072517881d226d7c8af09cb88684d41af7e | [
"Xnet",
"X11",
"CECILL-B"
] | null | null | null | abc/166/A.py | tonko2/AtCoder | 5d617072517881d226d7c8af09cb88684d41af7e | [
"Xnet",
"X11",
"CECILL-B"
] | null | null | null | contests = ['ABC', 'ARC']
S = input()
contests.remove(S)
print(contests[0]) | 18.75 | 25 | 0.653333 | 11 | 75 | 4.454545 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014925 | 0.106667 | 75 | 4 | 26 | 18.75 | 0.716418 | 0 | 0 | 0 | 0 | 0 | 0.078947 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
711e169130e686398528c2631ac52499a472cbef | 3,068 | py | Python | simple_dispatch/dispatch.py | ossdev07/SimpleDispatch | 051556b4f8a12bd85575eaed102ba6bfb672def1 | [
"MIT"
] | null | null | null | simple_dispatch/dispatch.py | ossdev07/SimpleDispatch | 051556b4f8a12bd85575eaed102ba6bfb672def1 | [
"MIT"
] | null | null | null | simple_dispatch/dispatch.py | ossdev07/SimpleDispatch | 051556b4f8a12bd85575eaed102ba6bfb672def1 | [
"MIT"
] | null | null | null | from typing import Callable
# Module-level event registry: maps event name -> list of handler callables.
# Lazily initialised to a dict by the first connect() call.
_handlers = None
def connect(event_name: str, func: Callable):
    """
    Connects a given function to be subscribed to the given event.
    :param event_name: The name of the event to subscribe to.
    :param func: The function that will be invoked when the event is published.
    """
    global _handlers
    # Lazily create the registry on first use. The original used a truthiness
    # check (`if not _handlers`), which also needlessly rebuilt the dict
    # whenever it happened to be empty; `is None` is the precise condition.
    if _handlers is None:
        _handlers = dict()
    # setdefault handles both the first handler for an event and appends to
    # the existing list, replacing the original membership-test branch.
    _handlers.setdefault(event_name, []).append(func)
def dispatch(event_name: str, **kwargs):
    """
    Dispatches a particular event to all subscribers with the given arguments. The subscribers
    are assumed to accept kwargs. If not, an error will be incurred.
    :param event_name: The name of the event that is being published.
    :param kwargs: The arguments to pass to the handler functions.
    """
    # Nothing registered at all: quit early.
    if not _handlers:
        return
    # An empty default collapses the "event not registered" case into the loop.
    for subscribed_fn in _handlers.get(event_name, ()):
        subscribed_fn(**kwargs)
def subscriber(*args):
    """
    Subscribes the annotated callable to one or more events. You can either pass a single or more than one
    event to this annotation
    :param args: The list of hashable event declarations that we are going to subscribe to.
    """
    def register(func):
        # Register the callable under every declared event, then hand the
        # original function back unchanged.
        for declared_event in args:
            connect(declared_event, func)
        return func
    return register
def dispatch_before(event_name):
    """
    Dispatches the defined event before the function is executed. args and kwargs are passed to the handler.
        @subscriber('BEFORE_SOME_EVENT')
        def my_handler(**kwargs):
            caller_args = kwargs['func_args']
            caller_kwargs = kwargs['func_kwargs']
    :param event_name: The name of the event to dispatch.
    """
    def decorator(func):
        # functools.wraps preserves the wrapped function's __name__/__doc__,
        # which the original wrapper discarded.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            dispatch(event_name, func_args=args, func_kwargs=kwargs)
            return func(*args, **kwargs)
        return wrapper
    return decorator
def dispatch_after(event_name):
    """
    Dispatches the defined event after the function is executed and before the result is returned to the caller of the
    annotated function. Additionally, the result or return of the function call is passed in the result kwarg.
        @subscriber('AFTER_SOME_EVENT')
        def my_handler(**kwargs):
            caller_args = kwargs['func_args']
            caller_kwargs = kwargs['func_kwargs']
            caller_result = kwargs['result']
    :param event_name: The name of the event to dispatch.
    """
    def decorator(func):
        # functools.wraps preserves the wrapped function's __name__/__doc__,
        # which the original wrapper discarded.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            result = func(*args, **kwargs)
            dispatch(event_name, func_args=args, func_kwargs=kwargs, result=result)
            return result
        return wrapper
    return decorator
| 29.786408 | 118 | 0.674055 | 414 | 3,068 | 4.881643 | 0.251208 | 0.066799 | 0.027709 | 0.033647 | 0.290945 | 0.269174 | 0.235527 | 0.235527 | 0.220188 | 0.203859 | 0 | 0 | 0.253911 | 3,068 | 102 | 119 | 30.078431 | 0.882918 | 0.544329 | 0 | 0.324324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.27027 | false | 0 | 0.027027 | 0 | 0.540541 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
7121f1f037c6646fc97580bcb231acfa36639d90 | 36,430 | py | Python | utils/synthetic_cell_nuclei_masks.py | stegmaierj/CellSynthesis | de2c90ed668b7f57b960896473df3d56636eca82 | [
"Apache-2.0"
] | 1 | 2021-07-21T21:40:32.000Z | 2021-07-21T21:40:32.000Z | utils/synthetic_cell_nuclei_masks.py | stegmaierj/CellSynthesis | de2c90ed668b7f57b960896473df3d56636eca82 | [
"Apache-2.0"
] | null | null | null | utils/synthetic_cell_nuclei_masks.py | stegmaierj/CellSynthesis | de2c90ed668b7f57b960896473df3d56636eca82 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
# 3D Image Data Synthesis.
# Copyright (C) 2021 D. Eschweiler, M. Rethwisch, M. Jarchow, S. Koppers, J. Stegmaier
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the Liceense at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Please refer to the documentation for more information about the software
# as well as for installation instructions.
#
"""
import os
import pyshtools
import itertools
import numpy as np
from skimage import io, filters, morphology, measure
from scipy.stats import multivariate_normal
from scipy.ndimage import convolve, distance_transform_edt, gaussian_filter
from pyquaternion import Quaternion
from utils.utils import print_timestamp
from utils.harmonics import harmonics2sampling, sampling2instance
from utils.h5_converter import h5_writer
def generate_data(synthesizer, save_path, experiment_name='dummy_nuclei', num_imgs=50, img_shape=(140,140,1000), max_radius=40, min_radius=20, std_radius=10, psf=None,\
                  sh_order=20, num_cells=200, num_cells_std=50, circularity=5, smooth_std=0.5, noise_std=0.1, noise_mean=-0.1, position_std=3,\
                  cell_elongation=1.5, irregularity_extend=50, generate_images=False, theta_phi_sampling_file=r'utils/theta_phi_sampling_5000points_10000iter.npy'):
    """Generate ``num_imgs`` synthetic mask (and optionally image) volumes.

    For every volume, a mean cell radius is drawn from
    [min_radius, max_radius] and spread by ``std_radius``, and a cell count
    is drawn from ``num_cells`` +/- ``num_cells_std``. Masks are written as
    TIF and H5 below ``save_path``; raw images only when ``generate_images``.

    :param synthesizer: A synthesizer *class* (e.g. SyntheticNuclei subclass);
        it is instantiated here with the remaining parameters.
    :param save_path: Root output directory.
    """
    # Set up the synthesizer.
    # BUGFIX: the original passed the undefined name `theta_phi_sampling`
    # (NameError on every call); the synthesizer expects the sampling file
    # path, as in generate_data_from_masks below.
    synthesizer = synthesizer(img_shape=img_shape, max_radius=max_radius, min_radius=min_radius,\
                              smooth_std=smooth_std, noise_std=noise_std, noise_mean=noise_mean,\
                              sh_order=sh_order, circularity=circularity, num_cells=num_cells, psf=psf,\
                              position_std=position_std, theta_phi_sampling_file=theta_phi_sampling_file,\
                              cell_elongation=cell_elongation, irregularity_extend=irregularity_extend,
                              generate_images=generate_images)

    # Set up the save directories
    if generate_images:
        os.makedirs(os.path.join(save_path, 'images'), exist_ok=True)
    os.makedirs(os.path.join(save_path, 'masks'), exist_ok=True)

    for num_data in range(num_imgs):
        # Randomise the radius range and cell count for this volume.
        current_radius = np.random.randint(min_radius, max_radius)
        synthesizer.max_radius = current_radius + std_radius
        synthesizer.min_radius = current_radius - std_radius
        cell_count = np.random.randint(num_cells-num_cells_std, num_cells+num_cells_std)
        synthesizer.num_cells = cell_count

        print_timestamp('_'*20)
        print_timestamp('Generating image {0}/{1} with {2} cells of size {3}-{4}', [num_data+1, num_imgs, cell_count, current_radius-std_radius, current_radius+std_radius])

        # Get the image and the corresponding mask
        processed_img, instance_mask = synthesizer.generate_data()

        ## Save the image
        for num_img,img in enumerate(processed_img):
            if not img is None:
                save_name_img = 'psf{0}_img_'.format(num_img)+experiment_name+'_{0}'.format(num_data)
                # TIF — BUGFIX: scale *before* casting; `255*img.astype(np.uint8)`
                # truncated the [0,1] float image to 0/1 first, yielding a
                # nearly black image.
                io.imsave(os.path.join(save_path, 'images', save_name_img+'.tif'), (255*img).astype(np.uint8))
                # H5: percentile-normalise the image to [0,1] first.
                img = img.astype(np.float32)
                perc01, perc99 = np.percentile(img, [1,99])
                if not perc99-perc01 <= 0:
                    img -= perc01
                    img /= (perc99-perc01)
                else:
                    img /= img.max()
                img = np.clip(img, 0, 1)
                # BUGFIX: write the H5 next to the TIF instead of the CWD
                # (the 'images' directory is created above but was unused).
                h5_writer([img], os.path.join(save_path, 'images', save_name_img+'.h5'), group_root='data', group_names=['image'])

        ## Save the mask
        save_name_mask = 'mask_'+experiment_name+'_{0}'.format(num_data)
        # TIF
        io.imsave(os.path.join(save_path, 'masks', save_name_mask+'.tif'), instance_mask.astype(np.uint16))
        # H5: instance labels plus the synthesizer's foreground distance map.
        h5_writer([instance_mask, synthesizer.dist_map], os.path.join(save_path, 'masks', save_name_mask+'.h5'), group_root='data', group_names=['nuclei', 'distance'])
def generate_data_from_masks(synthesizer_class, save_path, filelist, min_radius=8, max_radius=9, std_radius=1, psf=None,\
                             sh_order=20, circularity=5, smooth_std=0.5, noise_std=0.1, noise_mean=-0.1, position_std=3, bg_label=0,\
                             cell_elongation=1.5, irregularity_extend=50, generate_images=False, theta_phi_sampling_file=r'utils/theta_phi_sampling_5000points_10000iter.npy'):
    """Re-synthesize volumes from existing instance masks.

    For each mask file in ``filelist``, the template's shape, foreground and
    cell centroids are extracted and a new synthetic volume is generated with
    the same layout. Outputs are written below ``save_path``.

    :param synthesizer_class: A synthesizer class (e.g. SyntheticNuclei
        subclass); instantiated once and re-parameterised per file.
    :param filelist: Iterable of paths to label-mask TIF files.
    :param bg_label: Label value treated as background in the templates.
    """
    # Set up the synthesizer (shape/cell count are overwritten per file).
    synthesizer = synthesizer_class(img_shape=(100,100,100), max_radius=max_radius, min_radius=min_radius,\
                                    smooth_std=smooth_std, noise_std=noise_std, noise_mean=noise_mean,\
                                    sh_order=sh_order, circularity=circularity, num_cells=0, psf=psf,\
                                    position_std=position_std, theta_phi_sampling_file=theta_phi_sampling_file,\
                                    cell_elongation=cell_elongation, irregularity_extend=irregularity_extend,
                                    generate_images=generate_images)

    # Set up the save directories
    if generate_images:
        os.makedirs(os.path.join(save_path, 'images_h5'), exist_ok=True)
    os.makedirs(os.path.join(save_path, 'segmentation'), exist_ok=True)
    os.makedirs(os.path.join(save_path, 'segmentation_h5'), exist_ok=True)

    for num_file, file in enumerate(filelist):
        print_timestamp('_'*20)
        print_timestamp('Extracting statistics from image {0}/{1}', [num_file+1, len(filelist)])

        # Adopt the template's shape and centroid positions.
        template = io.imread(file)
        synthesizer.img_shape = template.shape
        positions = []
        for props in measure.regionprops(template):
            positions.append([int(p) for p in props.centroid])
        synthesizer.num_cells = len(positions)

        current_radius = np.random.randint(min_radius, max_radius)
        synthesizer.max_radius = current_radius + std_radius
        synthesizer.min_radius = current_radius - std_radius

        print_timestamp('Generating image with {0} cells of size {1}-{2}', [len(positions), current_radius-std_radius, current_radius+std_radius])

        # Get the image and the corresponding mask
        processed_img, instance_mask = synthesizer.generate_data(foreground=template!=bg_label, positions=positions)

        ## Save the image
        for num_img,img in enumerate(processed_img):
            if not img is None:
                save_name_img = 'psf{0}_img_'.format(num_img)+os.path.split(file)[-1][:-4]
                # TIF — BUGFIX: scale *before* casting; `255*img.astype(np.uint8)`
                # truncated the [0,1] float image to 0/1 first.
                io.imsave(os.path.join(save_path, 'images_h5', save_name_img+'.tif'), (255*img).astype(np.uint8))
                # H5: percentile-normalise the image to [0,1] first.
                img = img.astype(np.float32)
                perc01, perc99 = np.percentile(img, [1,99])
                if not perc99-perc01 <= 0:
                    img -= perc01
                    img /= (perc99-perc01)
                else:
                    img /= img.max()
                img = np.clip(img, 0, 1)
                # BUGFIX: write the H5 into the 'images_h5' directory created
                # above instead of the CWD.
                h5_writer([img], os.path.join(save_path, 'images_h5', save_name_img+'.h5'), group_root='data', group_names=['image'])

        ## Save the mask
        save_name_mask = 'SimMask_'+os.path.split(file)[-1][:-4]
        # TIF
        io.imsave(os.path.join(save_path, 'segmentation', save_name_mask+'.tif'), instance_mask.astype(np.uint16))
        # H5: instance labels plus the synthesizer's foreground distance map.
        h5_writer([instance_mask, synthesizer.dist_map], os.path.join(save_path, 'segmentation_h5', save_name_mask+'.h5'), group_root='data', group_names=['nuclei', 'distance'])
class SyntheticNuclei:
    """Base synthesizer for 3D nuclei instance masks (optionally raw images).

    Cell shapes are drawn as random spherical-harmonic coefficient sets whose
    per-order power decays with ``circularity``; the coefficients are
    converted to radial samplings and voxelised into an instance mask. When
    ``generate_images`` is set, a raw image is additionally rendered by
    assigning random intensities, convolving with the given PSF(s) and adding
    Gaussian noise.
    """

    def __init__(self, img_shape=(200,400,400), max_radius=50, min_radius=20, psf=None, sh_order=20, smooth_std=1,\
                 noise_std=0.1, noise_mean=0, num_cells=10, circularity=5, generate_images=False,\
                 theta_phi_sampling_file=r'utils/theta_phi_sampling_5000points_10000iter.npy', **kwargs):

        self.img_shape = img_shape
        self.max_radius = max_radius
        self.min_radius = min_radius
        self.sh_order = sh_order
        # A real spherical-harmonic expansion of order L has (L+1)^2 coefficients.
        self.num_coefficients = (sh_order+1)**2
        self.smooth_std = smooth_std
        self.noise_std = noise_std
        self.noise_mean = noise_mean
        self.circularity = circularity
        self.num_cells = num_cells
        self.generate_images = generate_images
        self.theta_phi_sampling_file = theta_phi_sampling_file

        # Accept a single PSF or a list of PSFs; each entry may be an array
        # or a file path (TIF/PNG image or numpy archive).
        if not isinstance(psf, (tuple, list)):
            psf = [psf]
        self.psf = []
        for p in psf:
            if isinstance(p, str):
                # BUGFIX: the original called `psf.endswith(...)` and
                # `io.imread(psf)` on the *list* instead of the entry `p`,
                # which raised an AttributeError for any string PSF path.
                if p.endswith(('.tif', '.TIF', 'png')):
                    self.psf.append(io.imread(p))
                elif p.endswith(('.npz', '.npy')):
                    self.psf.append(np.load(p))
                else:
                    raise TypeError('Unknown PSF file format.')
            else:
                self.psf.append(p)

        self.fg_map = None
        self.instance_mask = None
        self.processed_img = [None]

        self._preparations()

    def _preparations(self):
        # Setting up the harmonic-to-sampling converter.
        print_timestamp('Loading sampling angles...')
        self.theta_phi_sampling = np.load(self.theta_phi_sampling_file)
        print_timestamp('Setting up harmonic converter...')
        self.h2s = harmonics2sampling(self.sh_order, self.theta_phi_sampling)

    def generate_data(self, foreground=None, positions=None):
        """Run the full pipeline and return (processed_img, instance_mask).

        :param foreground: Optional precomputed foreground mask; when omitted
            the subclass-specific foreground is generated.
        :param positions: Optional list of cell centroids; when omitted
            positions are sampled within the foreground.
        """
        if foreground is None:
            print_timestamp('Generating foreground...')
            self._generate_foreground()
        else:
            self.fg_map = foreground>0
            self._generate_distmap()
        if positions is None:
            print_timestamp('Determining cell positions...')
            self.positions = self._generate_positions()
        else:
            self.positions = positions
        print_timestamp('Starting cell generation...')
        self._generate_instances()
        if self.generate_images:
            print_timestamp('Starting synthesis process...')
            self._generate_image()
        print_timestamp('Finished...')
        return self.processed_img, self.instance_mask

    def _generate_foreground(self):
        # Base class default: empty foreground; subclasses override this.
        # NOTE: `np.bool` was a deprecated alias of the builtin and has been
        # removed from recent numpy versions; `bool` is equivalent.
        self.fg_map = np.zeros(self.img_shape, dtype=bool)

    def _generate_distmap(self):
        # Generate a signed foreground distance map (positive inside,
        # negative outside), computed on a 4x-downsampled grid for speed.
        fg_map = self.fg_map[::4,::4,::4]
        dist_map = distance_transform_edt(fg_map>=1)
        dist_map = dist_map - distance_transform_edt(fg_map<1)
        dist_map = dist_map.astype(np.float32)
        # Rescale to the original size by voxel repetition.
        dist_map = np.repeat(dist_map, 4, axis=0)
        dist_map = np.repeat(dist_map, 4, axis=1)
        dist_map = np.repeat(dist_map, 4, axis=2)
        # Crop any overshoot caused by shapes not divisible by 4.
        dim_missmatch = np.array(self.fg_map.shape)-np.array(dist_map.shape)
        if dim_missmatch[0]<0: dist_map = dist_map[:dim_missmatch[0],...]
        if dim_missmatch[1]<0: dist_map = dist_map[:,:dim_missmatch[1],:]
        if dim_missmatch[2]<0: dist_map = dist_map[...,:dim_missmatch[2]]
        dist_map = dist_map.astype(np.float32)
        self.dist_map = dist_map

    def _generate_positions(self):
        """Sample non-overlapping centroids, keeping a border of ~one cell."""
        positions = np.zeros((self.num_cells, 3), dtype=np.uint16)
        # Map of possible cell locations: the whole inner volume.
        location_map = self.fg_map.copy()
        cell_size_est = (self.min_radius + self.max_radius) // 2
        slicing = tuple(map(slice, [cell_size_est,]*len(self.img_shape), [s-cell_size_est for s in self.img_shape]))
        location_map[slicing] = True
        for cell_count in range(self.num_cells):
            # Pick a random remaining voxel as the centroid.
            # NOTE(review): assumes at least one free voxel remains; unlike
            # the subclasses there is no early-exit for a full volume.
            location = np.array(np.nonzero(location_map))
            location = location[:,np.random.randint(0, location.shape[1])]
            positions[cell_count,:] = location
            # Exclude the region of the current cell from future locations.
            slicing = tuple(map(slice, list(np.maximum(location-cell_size_est, 0)), list(location+cell_size_est)))
            location_map[slicing] = False
        return positions

    def _generate_instances(self):
        assert self.circularity>=0, 'Circularity needs to be positive.'
        # Power spectrum per harmonic order: order^-circularity, DC excluded.
        power_per_order = np.arange(self.sh_order+1, dtype=np.float32)
        power_per_order[0] = np.inf
        power_per_order = power_per_order**-self.circularity
        coeff_list = np.zeros((len(self.positions), self.num_coefficients), dtype=np.float32)
        for cell_count in range(len(self.positions)):
            # Draw random harmonic coefficients with the prescribed spectrum.
            clm = pyshtools.SHCoeffs.from_random(power_per_order)
            coeffs = clm.coeffs
            coeffs[0,0,0] = 1
            # Draw a radius and scale all coefficients by it.
            radius = np.random.randint(self.min_radius, self.max_radius)
            coeffs *= radius
            # Flatten the pyshtools (2, L+1, L+1) layout into one vector.
            coeffs = np.concatenate((np.fliplr(coeffs[0,...]), coeffs[1,...]), axis=1)
            coeffs = coeffs[np.nonzero(coeffs)]
            assert len(coeffs) == self.num_coefficients, 'Number of coefficients did not match the expected value.'
            coeff_list[cell_count,:] = coeffs
        # Reconstruct the radial sampling from the coefficients.
        r_sampling = self.h2s.convert(coeff_list)
        # Voxelise the samplings into the instance mask.
        instance_mask = sampling2instance(self.positions, r_sampling, self.theta_phi_sampling, self.img_shape, verbose=True)
        self.instance_mask = instance_mask

    def _generate_image(self):
        assert not self.instance_mask is None, 'There needs to be an instance mask.'
        # Render each labelled cell with a random uniform intensity.
        img_raw = np.zeros_like(self.instance_mask, dtype=np.float32)
        for label in np.unique(self.instance_mask):
            if label == 0: continue # exclude background
            img_raw[self.instance_mask == label] = np.random.uniform(0.5, 0.9)
        # One processed output per configured PSF.
        self.processed_img = []
        for num_psf,psf in enumerate(self.psf):
            print_timestamp('Applying PSF {0}/{1}...', [num_psf+1, len(self.psf)])
            img = img_raw.copy()
            # Perform PSF smoothing (skipped for a None entry).
            if not psf is None:
                img = convolve(img, psf)
            # Add final additive Gaussian noise and clip to [0,1].
            noise = np.random.normal(self.noise_mean, self.noise_std, size=self.img_shape)
            img = img+noise
            img = img.clip(0, 1)
            # Final smoothing touch
            img = filters.gaussian(img, self.smooth_std)
            self.processed_img.append(img.astype(np.float32))
class SyntheticCElegansWorm(SyntheticNuclei):
    """Synthesizer for C. elegans-like embryos: nuclei packed in an ellipsoid."""

    def __init__(self, img_shape=(140,140,1000), max_radius=20, min_radius=10, num_cells=400,\
                 psf=None, sh_order=20, smooth_std=0.5, noise_std=0.1, noise_mean=-0.1, circularity=5,\
                 theta_phi_sampling_file=r'utils/theta_phi_sampling_5000points_10000iter.npy', **kwargs):
        super().__init__(img_shape=img_shape, max_radius=max_radius, min_radius=min_radius, num_cells=num_cells,\
                         psf=psf, sh_order=sh_order, smooth_std=smooth_std, noise_mean=noise_mean,\
                         noise_std=noise_std, circularity=circularity, theta_phi_sampling_file=theta_phi_sampling_file)

    def _generate_foreground(self):
        # within ellipsoid equation: (x/a)^2 + (y/b)^2 + (z/c)^2 < 1
        # Semi-axes cover 90% of each image dimension.
        a,b,c = [int(i*0.45) for i in self.img_shape]
        x,y,z = np.indices(self.img_shape)
        ellipsoid = ((x-self.img_shape[0]//2)/a)**2 + ((y-self.img_shape[1]//2)/b)**2 + ((z-self.img_shape[2]//2)/c)**2
        self.fg_map = ellipsoid<=1

    def _generate_positions(self):
        """Sample non-overlapping centroids inside the foreground ellipsoid."""
        positions = np.zeros((self.num_cells, 3), dtype=np.uint16)
        # Get map of possible cell locations
        location_map = self.fg_map.copy()
        for cell_count in range(self.num_cells):
            print_timestamp('Placing cell {0}/{1}...', [cell_count+1, self.num_cells])
            # Get random centroid
            location = np.array(np.nonzero(location_map))
            if location.shape[1] == 0:
                print_timestamp('The maximum number of cells ({0}) was reached...', [cell_count+1])
                # BUGFIX: rows 0..cell_count-1 were all placed successfully,
                # so keep [:cell_count]; the original [:cell_count-1] dropped
                # the last valid centroid (and kept num_cells-1 zero rows when
                # the volume was full at the very first iteration).
                positions = positions[:cell_count,:]
                break
            location = location[:,np.random.randint(0, location.shape[1])]
            positions[cell_count,:] = location
            # Exclude region of current cell from possible future locations
            slicing = tuple(map(slice, list(np.maximum(location-self.min_radius, 0)), list(location+self.min_radius)))
            location_map[slicing] = False
        return positions
class SyntheticTRIF(SyntheticNuclei):
    """Synthesizer for TRIF-like embryos: elongated nuclei on the distorted
    surface shell of a large ellipsoid, oriented along the local surface
    normal (approximated by the gradient of the foreground distance map).
    """

    def __init__(self, img_shape=(900,1800,900), min_radius=13, max_radius=18, cell_elongation=2, num_cells=3500, psf=None,\
                 smooth_std=0.5, noise_std=0.1, noise_mean=-0.1, position_std=3, irregularity_extend=200, **kwargs):
        super().__init__(img_shape=img_shape, max_radius=max_radius, min_radius=min_radius, num_cells=num_cells,\
                         psf=psf, smooth_std=smooth_std, noise_mean=noise_mean,\
                         noise_std=noise_std)
        # position_std: thickness scale of the surface shell used for placement.
        self.position_std = position_std
        # cell_elongation: ratio of the major to the minor ellipsoid axes.
        self.cell_elongation = cell_elongation
        # irregularity_extend: magnitude of the random shape distortion.
        self.irregularity_extend = irregularity_extend

    def _preparations(self):
        # This subclass renders ellipsoidal cells directly and does not use
        # the spherical-harmonic converter, so the base setup is skipped.
        pass

    def _generate_foreground(self):
        # determine ellipsoid parameters (adjusted to the image size)
        a,b,c = [int(i*0.4) for i in self.img_shape]
        x,y,z = np.indices(self.img_shape, dtype=np.float16)

        # distort the coordinates with random gaussian distributions to simulate random shape irregularities
        # coords = coords +/- extend * exp(-x_norm**2/sigma_x - y_norm**2/sigma_y**2 - z_norm**2/sigma_z**2)
        # Random sign and magnitude of the distortion along each axis.
        extend_x = (-1)**np.random.randint(0,2) * np.random.randint(self.irregularity_extend/2,np.maximum(self.irregularity_extend,1))
        extend_y = (-1)**np.random.randint(0,2) * np.random.randint(self.irregularity_extend/2,np.maximum(self.irregularity_extend,1))
        extend_z = (-1)**np.random.randint(0,2) * np.random.randint(self.irregularity_extend/2,np.maximum(self.irregularity_extend,1))
        # Each distortion field is a randomly centred/scaled 3D Gaussian bump
        # evaluated on the voxel grid (float16 to limit memory).
        distortion_x = np.exp(- np.divide(x-np.random.randint(0,2*a),np.random.randint(a/2,a),dtype=np.float16)**2 - np.divide(y-np.random.randint(0,2*b),np.random.randint(b/2,b),dtype=np.float16)**2 - np.divide(z-np.random.randint(0,2*c),np.random.randint(c/2,c),dtype=np.float16)**2, dtype=np.float16)
        distortion_y = np.exp(- np.divide(x-np.random.randint(0,2*a),np.random.randint(a/2,a),dtype=np.float16)**2 - np.divide(y-np.random.randint(0,2*b),np.random.randint(b/2,b),dtype=np.float16)**2 - np.divide(z-np.random.randint(0,2*c),np.random.randint(c/2,c),dtype=np.float16)**2, dtype=np.float16)
        distortion_z = np.exp(- np.divide(x-np.random.randint(0,2*a),np.random.randint(a/2,a),dtype=np.float16)**2 - np.divide(y-np.random.randint(0,2*b),np.random.randint(b/2,b),dtype=np.float16)**2 - np.divide(z-np.random.randint(0,2*c),np.random.randint(c/2,c),dtype=np.float16)**2, dtype=np.float16)
        x = x + extend_x * distortion_x
        y = y + extend_y * distortion_y
        z = z + extend_z * distortion_z

        # within ellipsoid equation: (x/a)^2 + (y/b)^2 + /z/c)^2 < 1
        ellipsoid = ((x-self.img_shape[0]//2)/a)**2 + ((y-self.img_shape[1]//2)/b)**2 + ((z-self.img_shape[2]//2)/c)**2
        self.fg_map = ellipsoid<=1

        # The distance map is needed later for cell orientation.
        self._generate_distmap()

    def _generate_positions(self):
        """Sample non-overlapping centroids within the outer foreground shell."""
        positions = np.zeros((self.num_cells, 3), dtype=np.uint16)

        # Get map of possible cell locations (outer ring)
        location_map = np.logical_xor(self.fg_map, morphology.binary_erosion(self.fg_map, selem=morphology.ball(self.position_std*2)))
        locations = np.array(np.nonzero(location_map))

        # Get cell parameters (*2 since we are looking for centroids)
        cell_shape = 2*np.array([self.max_radius, self.max_radius/self.cell_elongation, self.max_radius/self.cell_elongation])

        for cell_count in range(self.num_cells):
            print_timestamp('Placing cell {0}/{1}...', [cell_count+1, self.num_cells])
            # Get random centroid
            if locations.shape[1] == 0:
                print_timestamp('The maximum number of cells ({0}) was reached...', [cell_count+1])
                # NOTE(review): [:cell_count-1] appears to drop the last
                # validly placed centroid — [:cell_count] looks intended;
                # confirm before changing (same pattern in the worm class).
                positions = positions[:cell_count-1,:]
                break
            location = locations[:,np.random.randint(0, locations.shape[1])]
            positions[cell_count,:] = location

            # Exclude region of current cell from possible future locations
            # (drop all candidates within the cell's ellipsoidal extent).
            distances = locations - location[:,np.newaxis]
            distances = distances / cell_shape[:,np.newaxis]
            distances = np.sum(distances**2, axis=0)
            locations = locations[:,distances>1]

        return positions

    def _generate_instances(self):
        """Render one ellipsoidal cell per centroid, oriented by the local
        distance-map gradient, into a uint16 instance mask."""
        # calculate the gradient direction at each position (used to orient each cell)
        grad_map_x, grad_map_y, grad_map_z = np.gradient(self.dist_map, 5)
        grad_map_x = gaussian_filter(grad_map_x, 5)
        grad_map_y = gaussian_filter(grad_map_y, 5)
        grad_map_z = gaussian_filter(grad_map_z, 5)

        # normalize the gradient vectors to unit length
        grad_norm = np.sqrt(grad_map_x**2 + grad_map_y**2 + grad_map_z**2)
        grad_map_x = grad_map_x/grad_norm
        grad_map_y = grad_map_y/grad_norm
        grad_map_z = grad_map_z/grad_norm

        # create local coordinates (centred cube large enough for any rotation)
        cell_mask_shape = (self.max_radius*3,)*3
        coords_default = np.indices(cell_mask_shape)
        coords_default = np.reshape(coords_default, (3,-1))
        coords_default = np.subtract(coords_default, coords_default.max(axis=1, keepdims=True)//2)
        coords_default = coords_default.astype(np.float16)

        # place a cell at each position
        instance_mask = np.zeros(self.dist_map.shape, dtype=np.uint16)
        for num_cell, pos in enumerate(self.positions):
            print_timestamp('Generating cell {0}/{1}...', [num_cell+1, len(self.positions)])
            cell_size = np.random.randint(self.min_radius,self.max_radius)
            # Semi-axes: elongated along the first axis by cell_elongation.
            a,b,c = [cell_size,cell_size/self.cell_elongation,cell_size/self.cell_elongation]
            coords = coords_default.copy()

            # rotation axis is perpendicular to gradient direction and the major axis of the cell
            grad_vec = [grad_map_x[tuple(pos)], grad_map_y[tuple(pos)], grad_map_z[tuple(pos)]]
            cell_vec = [0,]*3
            cell_vec[np.argmax([a,b,c])] = 1
            rot_axis = np.cross(grad_vec, cell_vec)
            axis_norm = np.sqrt(np.sum(rot_axis**2))
            # A zero cross product means the vectors are already (anti)parallel.
            if not axis_norm==0:
                # normalize the rotation axis
                rot_axis = rot_axis / axis_norm
                # calculate the angle from: a*b = ||a||*||b||*cos(angle)
                rot_angle = np.arccos(np.dot(grad_vec, cell_vec)/1)
                # rotate using the quaternion
                cell_quant = Quaternion(axis=rot_axis, angle=rot_angle)
                coords = np.matmul(cell_quant.rotation_matrix, coords)
            coords = coords.reshape((3,)+cell_mask_shape)
            x_new = coords[0,...]
            y_new = coords[1,...]
            z_new = coords[2,...]
            # Voxels inside the rotated ellipsoid belong to the cell.
            ellipsoid = ((x_new/a)**2 + (y_new/b)**2 + (z_new/c)**2) <= 1
            # Paste the local cube into the global mask, clamped to the image.
            slice_start = [np.minimum(np.maximum(0,p-c//2),i-c) for p,c,i in zip(pos,cell_mask_shape,self.img_shape)]
            slice_end = [s+c for s,c in zip(slice_start,cell_mask_shape)]
            slicing = tuple(map(slice, slice_start, slice_end))
            # np.maximum lets later cells overwrite background but keeps
            # earlier labels where masks overlap only partially.
            instance_mask[slicing] = np.maximum(instance_mask[slicing], (num_cell+1)*ellipsoid.astype(np.uint16))

        self.instance_mask = instance_mask.astype(np.uint16)
class SyntheticDRO(SyntheticNuclei):
    """Synthetic DRO-style volume generator.

    Builds an irregular, randomly rotated ellipsoidal foreground, then places
    elongated ellipsoidal cells on its outer shell, each oriented along the
    local gradient of the foreground distance map.
    """
    def __init__(self, img_shape=(300,600,1200), min_radius=13, max_radius=18, cell_elongation=3, num_cells=1000, psf=None,\
                 smooth_std=0.5, noise_std=0.1, noise_mean=-0.1, position_std=3, irregularity_extend=200, **kwargs):
        """
        img_shape: shape of the generated volume.
        min_radius/max_radius: per-cell size range (major semi-axis, voxels).
        cell_elongation: ratio of the major axis over the two minor axes.
        position_std: controls the thickness of the shell cells are placed on.
        irregularity_extend: magnitude of the large-scale shape distortions.
        Remaining parameters are forwarded to SyntheticNuclei; extra **kwargs
        are accepted but ignored.
        """
        super().__init__(img_shape=img_shape, max_radius=max_radius, min_radius=min_radius, num_cells=num_cells,\
                         psf=psf, smooth_std=smooth_std, noise_mean=noise_mean,\
                         noise_std=noise_std)
        self.position_std = position_std
        self.cell_elongation = cell_elongation
        self.irregularity_extend = irregularity_extend
    def _preparations(self):
        # No extra preparation is needed for this generator.
        pass
    def _generate_foreground(self):
        """Create the binary foreground mask (self.fg_map): a large ellipsoid
        that is randomly rotated and distorted, then compute its distance map."""
        # Determine positions
        # Centered voxel coordinates; float16 keeps memory low for large
        # volumes at the cost of precision — assumed acceptable here.
        coords = np.indices(self.img_shape, dtype=np.float16)
        coords[0,...] -= self.img_shape[0]//2
        coords[1,...] -= self.img_shape[1]//2
        coords[2,...] -= self.img_shape[2]//2
        # Rotate workspace around x- and y-axis by 5 to 9 degrees
        # (np.random.randint's upper bound is exclusive).
        coords = coords.reshape((3,-1))
        alpha_x = -np.radians(np.random.randint(5,10))
        alpha_y = -np.radians(np.random.randint(5,10))
        Rx = np.array([[1,0,0],[0,np.cos(alpha_x),-np.sin(alpha_x)],[0,np.sin(alpha_x),np.cos(alpha_x)]])
        Ry = np.array([[np.cos(alpha_y),0,np.sin(alpha_y)],[0,1,0],[-np.sin(alpha_y),0,np.cos(alpha_y)]])
        coords = np.matmul(Rx,coords)
        coords = np.matmul(Ry,coords)
        coords = coords.reshape((3,)+self.img_shape)
        # determine ellipsoid parameters (adjusted to the image size)
        a,b,c = [int(i*0.4) for i in self.img_shape]
        # distort the coordinates with large random gaussian distributions to simulate shape irregularities
        # coords = coords +/- extend * exp(-x_norm**2/sigma_x - y_norm**2/sigma_y**2 - z_norm**2/sigma_z**2)
        # NOTE(review): np.random.randint is called with float bounds below
        # (a/2, b/2, ...); newer NumPy rejects non-integer bounds — confirm
        # the pinned NumPy version.
        extend_x = (-1)**np.random.randint(0,2) * np.random.randint(self.irregularity_extend/2,np.maximum(self.irregularity_extend,1))
        extend_y = (-1)**np.random.randint(0,2) * np.random.randint(self.irregularity_extend/2,np.maximum(self.irregularity_extend,1))
        extend_z = (-1)**np.random.randint(0,2) * np.random.randint(self.irregularity_extend/2,np.maximum(self.irregularity_extend,1))
        distortion_x = np.exp(- np.divide(coords[0,...]-np.random.randint(0,2*a),np.random.randint(a/2,a),dtype=np.float16)**2\
                              - np.divide(coords[1,...]-np.random.randint(0,2*b),np.random.randint(b/2,b),dtype=np.float16)**2\
                              - np.divide(coords[2,...]-np.random.randint(0,2*c),np.random.randint(c/2,c),dtype=np.float16)**2, dtype=np.float16)
        distortion_y = np.exp(- np.divide(coords[0,...]-np.random.randint(0,2*a),np.random.randint(a/2,a),dtype=np.float16)**2\
                              - np.divide(coords[1,...]-np.random.randint(0,2*b),np.random.randint(b/2,b),dtype=np.float16)**2\
                              - np.divide(coords[2,...]-np.random.randint(0,2*c),np.random.randint(c/2,c),dtype=np.float16)**2, dtype=np.float16)
        distortion_z = np.exp(- np.divide(coords[0,...]-np.random.randint(0,2*a),np.random.randint(a/2,a),dtype=np.float16)**2\
                              - np.divide(coords[1,...]-np.random.randint(0,2*b),np.random.randint(b/2,b),dtype=np.float16)**2\
                              - np.divide(coords[2,...]-np.random.randint(0,2*c),np.random.randint(c/2,c),dtype=np.float16)**2, dtype=np.float16)
        coords[0,...] = coords[0,...] + extend_x * distortion_x
        coords[1,...] = coords[1,...] + extend_y * distortion_y
        coords[2,...] = coords[2,...] + extend_z * distortion_z
        # distort the coordinates with small gaussian distributions to simulate identations
        # (0 to 4 indentations; randint upper bound is exclusive)
        for i in range(np.random.randint(0,5)):
            extend_x = np.random.randint(a,a*2)
            extend_y = np.random.randint(b,b*2)
            extend_z = np.random.randint(c,c*2)
            distortion_x = np.exp(- np.divide(coords[0,...]-np.random.randint(a/2,a),np.random.randint(a/2,a),dtype=np.float16)**2\
                                  - np.divide(coords[1,...]-np.random.randint(b/2,b),np.random.randint(b/2,b),dtype=np.float16)**2\
                                  - np.divide(coords[2,...]-np.random.randint(c/2,c),np.random.randint(c/20,c/10),dtype=np.float16)**2, dtype=np.float16)
            distortion_y = np.exp(- np.divide(coords[0,...]-np.random.randint(a/2,a),np.random.randint(a/2,a),dtype=np.float16)**2\
                                  - np.divide(coords[1,...]-np.random.randint(b/2,b),np.random.randint(b/2,b),dtype=np.float16)**2\
                                  - np.divide(coords[2,...]-np.random.randint(c/2,c),np.random.randint(c/20,c/10),dtype=np.float16)**2, dtype=np.float16)
            distortion_z = np.exp(- np.divide(coords[0,...]-np.random.randint(a/2,a),np.random.randint(a/2,a),dtype=np.float16)**2\
                                  - np.divide(coords[1,...]-np.random.randint(b/2,b),np.random.randint(b/2,b),dtype=np.float16)**2\
                                  - np.divide(coords[2,...]-np.random.randint(c/2,c),np.random.randint(c/20,c/10),dtype=np.float16)**2, dtype=np.float16)
            # np.sign pushes the indentation outward from the volume center
            coords[0,...] = coords[0,...] + np.sign(coords[0,...]) * extend_x * distortion_x
            coords[1,...] = coords[1,...] + np.sign(coords[1,...]) * extend_y * distortion_y
            coords[2,...] = coords[2,...] + np.sign(coords[2,...]) * extend_z * distortion_z
        # within ellipsoid equation: (x/a)^2 + (y/b)^2 + (z/c)^2 < 1
        ellipsoid = (coords[0,...]/a)**2 + (coords[1,...]/b)**2 + (coords[2,...]/c)**2
        self.fg_map = ellipsoid<=1
        self._generate_distmap()
    def _generate_positions(self):
        """Sample non-overlapping cell centroids on the outer shell of the
        foreground and return them as an (N, 3) uint16 array."""
        positions = np.zeros((self.num_cells, 3), dtype=np.uint16)
        # Get map of possible cell locations (outer ring)
        # NOTE(review): the `selem` keyword was renamed `footprint` in newer
        # scikit-image releases — confirm the pinned version.
        location_map = np.logical_xor(self.fg_map, morphology.binary_erosion(self.fg_map, selem=morphology.ball(self.position_std*2)))
        locations = np.array(np.nonzero(location_map))
        # Get cell parameters (*2 since we are looking for centroids)
        cell_shape = 2*np.array([self.max_radius, self.max_radius/self.cell_elongation, self.max_radius/self.cell_elongation])
        for cell_count in range(self.num_cells):
            print_timestamp('Placing cell {0}/{1}...', [cell_count+1, self.num_cells])
            # Get random centroid
            if locations.shape[1] == 0:
                print_timestamp('The maximum number of cells ({0}) was reached...', [cell_count+1])
                # NOTE(review): cells placed so far occupy indices
                # 0..cell_count-1, so truncating to cell_count-1 appears to
                # drop the last placed cell (off-by-one) — verify.
                positions = positions[:cell_count-1,:]
                break
            location = locations[:,np.random.randint(0, locations.shape[1])]
            positions[cell_count,:] = location
            # Exclude region of current cell from possible future locations
            # (normalized squared distance > 1 keeps a candidate available)
            distances = locations - location[:,np.newaxis]
            distances = distances / cell_shape[:,np.newaxis]
            distances = np.sum(distances**2, axis=0)
            locations = locations[:,distances>1]
        return positions
    def _generate_instances(self):
        """Rasterize one rotated ellipsoidal cell per sampled position into
        self.instance_mask (uint16 labels, 1-based)."""
        # calculate the gradient direction at each position (used to orient each cell)
        grad_map_x, grad_map_y, grad_map_z = np.gradient(self.dist_map, 5)
        grad_map_x = gaussian_filter(grad_map_x, 5)
        grad_map_y = gaussian_filter(grad_map_y, 5)
        grad_map_z = gaussian_filter(grad_map_z, 5)
        # normalize the gradient vectors to unit length
        # NOTE(review): voxels with zero gradient magnitude produce NaN here;
        # presumably the sampled shell positions avoid them — confirm.
        grad_norm = np.sqrt(grad_map_x**2 + grad_map_y**2 + grad_map_z**2)
        grad_map_x = grad_map_x/grad_norm
        grad_map_y = grad_map_y/grad_norm
        grad_map_z = grad_map_z/grad_norm
        # create local coordinates: a cube large enough to hold any rotated
        # cell, with the origin shifted to the cube center
        cell_mask_shape = (self.max_radius*3,)*3
        coords_default = np.indices(cell_mask_shape)
        coords_default = np.reshape(coords_default, (3,-1))
        coords_default = np.subtract(coords_default, coords_default.max(axis=1, keepdims=True)//2)
        coords_default = coords_default.astype(np.float16)
        # place a cell at each position
        instance_mask = np.zeros(self.dist_map.shape, dtype=np.uint16)
        for num_cell, pos in enumerate(self.positions):
            print_timestamp('Generating cell {0}/{1}...', [num_cell+1, len(self.positions)])
            cell_size = np.random.randint(self.min_radius,self.max_radius)
            # semi-axes: elongated along the first axis
            a,b,c = [cell_size,cell_size/self.cell_elongation,cell_size/self.cell_elongation]
            coords = coords_default.copy()
            # rotation axis is perpendicular to gradient direction and the major axis of the cell
            grad_vec = [grad_map_x[tuple(pos)], grad_map_y[tuple(pos)], grad_map_z[tuple(pos)]]
            cell_vec = [0,]*3
            cell_vec[np.argmax([a,b,c])] = 1
            rot_axis = np.cross(grad_vec, cell_vec)
            axis_norm = np.sqrt(np.sum(rot_axis**2))
            # a zero cross product means the two vectors are already
            # (anti)parallel, so no rotation is applied
            if not axis_norm==0:
                # normalize the rotation axis
                rot_axis = rot_axis / axis_norm
                # calculate the angle from: a*b = ||a||*||b||*cos(angle)
                # (the /1 is the product of the two unit-vector norms)
                rot_angle = np.arccos(np.dot(grad_vec, cell_vec)/1)
                # rotate using the quaternion
                cell_quant = Quaternion(axis=rot_axis, angle=rot_angle)
                coords = np.matmul(cell_quant.rotation_matrix, coords)
            coords = coords.reshape((3,)+cell_mask_shape)
            x_new = coords[0,...]
            y_new = coords[1,...]
            z_new = coords[2,...]
            ellipsoid = ((x_new/a)**2 + (y_new/b)**2 + (z_new/c)**2) <= 1
            # clamp the cell cube so it stays inside the image bounds
            # (c and i inside the comprehension shadow the semi-axis names
            # only within the comprehension scope)
            slice_start = [np.minimum(np.maximum(0,p-c//2),i-c) for p,c,i in zip(pos,cell_mask_shape,self.img_shape)]
            slice_end = [s+c for s,c in zip(slice_start,cell_mask_shape)]
            slicing = tuple(map(slice, slice_start, slice_end))
            # keep the higher label where cells overlap
            instance_mask[slicing] = np.maximum(instance_mask[slicing], (num_cell+1)*ellipsoid.astype(np.uint16))
        self.instance_mask = instance_mask.astype(np.uint16)
| 47.250324 | 303 | 0.592204 | 4,818 | 36,430 | 4.272105 | 0.091739 | 0.032648 | 0.059758 | 0.022543 | 0.730749 | 0.710003 | 0.693582 | 0.678327 | 0.662537 | 0.655201 | 0 | 0.032854 | 0.284793 | 36,430 | 771 | 304 | 47.250324 | 0.757129 | 0.100439 | 0 | 0.54195 | 0 | 0 | 0.036988 | 0.005996 | 0 | 0 | 0 | 0 | 0.006803 | 1 | 0.052154 | false | 0.004535 | 0.024943 | 0 | 0.097506 | 0.049887 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
712dccf11a8623f93cc1a6d9e212a9aebc4a39f9 | 425 | py | Python | test/test_backbone_utils.py | Gaurav7888/vision | a227a2026ef57fce08c1a73bad61bbabc3a52bfd | [
"BSD-3-Clause"
] | 2 | 2021-07-23T15:45:49.000Z | 2021-07-27T17:34:15.000Z | test/test_backbone_utils.py | Gaurav7888/vision | a227a2026ef57fce08c1a73bad61bbabc3a52bfd | [
"BSD-3-Clause"
] | 46 | 2020-10-20T09:52:53.000Z | 2021-08-15T09:29:27.000Z | test/test_backbone_utils.py | Gaurav7888/vision | a227a2026ef57fce08c1a73bad61bbabc3a52bfd | [
"BSD-3-Clause"
] | 2 | 2021-06-29T14:30:51.000Z | 2021-06-29T14:32:21.000Z | import torch
from torchvision.models.detection.backbone_utils import resnet_fpn_backbone
import pytest
@pytest.mark.parametrize('backbone_name', ('resnet18', 'resnet50'))
def test_resnet_fpn_backbone(backbone_name):
    # Feed a random CPU image through an untrained FPN backbone and check
    # that the output dict exposes the expected feature-pyramid levels.
    dummy_image = torch.rand(1, 3, 300, 300, dtype=torch.float32, device='cpu')
    backbone = resnet_fpn_backbone(backbone_name=backbone_name, pretrained=False)
    feature_maps = backbone(dummy_image)
    assert list(feature_maps.keys()) == ['0', '1', '2', '3', 'pool']
| 35.416667 | 77 | 0.731765 | 60 | 425 | 4.983333 | 0.6 | 0.160535 | 0.170569 | 0.167224 | 0.19398 | 0 | 0 | 0 | 0 | 0 | 0 | 0.047745 | 0.112941 | 425 | 11 | 78 | 38.636364 | 0.745358 | 0 | 0 | 0 | 0 | 0 | 0.094118 | 0 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.125 | false | 0 | 0.375 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
713b53cdaf7061c2baa7e69b3bfc2dd5bcba7b31 | 170 | py | Python | AMAO/settings/celery_settings.py | arruda/amao | 83648aa2c408b1450d721b3072dc9db4b53edbb8 | [
"MIT"
] | 2 | 2017-04-26T14:08:02.000Z | 2017-09-01T13:10:17.000Z | AMAO/settings/celery_settings.py | arruda/amao | 83648aa2c408b1450d721b3072dc9db4b53edbb8 | [
"MIT"
] | null | null | null | AMAO/settings/celery_settings.py | arruda/amao | 83648aa2c408b1450d721b3072dc9db4b53edbb8 | [
"MIT"
] | null | null | null | #coding: utf-8
import djcelery
djcelery.setup_loader()
BROKER_HOST = "localhost"
BROKER_PORT = 5672
BROKER_USER = "admin"
BROKER_PASSWORD = "mypass"
BROKER_VHOST = "/"
| 15.454545 | 26 | 0.752941 | 22 | 170 | 5.545455 | 0.772727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033784 | 0.129412 | 170 | 10 | 27 | 17 | 0.790541 | 0.076471 | 0 | 0 | 0 | 0 | 0.134615 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.142857 | 0.142857 | 0 | 0.142857 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
713cd9544e5aedaf7a2f331751f6c1faba151b1d | 433 | py | Python | kanbanflow_prj_selector/cli.py | igorbasko01/kanbanflow-prj-selector | e8f2ba010e2b09ef55954c564b34fbfd0a991bed | [
"MIT"
] | null | null | null | kanbanflow_prj_selector/cli.py | igorbasko01/kanbanflow-prj-selector | e8f2ba010e2b09ef55954c564b34fbfd0a991bed | [
"MIT"
] | null | null | null | kanbanflow_prj_selector/cli.py | igorbasko01/kanbanflow-prj-selector | e8f2ba010e2b09ef55954c564b34fbfd0a991bed | [
"MIT"
] | null | null | null | """Console script for kanbanflow_prj_selector."""
import sys
import click
from .kanbanflow_prj_selector import start
@click.command()
@click.option('-f', '--board-token-path', help='Input file with board tokens to fetch', required=True)
def main(board_token_path):
    """Console script for kanbanflow_prj_selector."""
    # Delegate to the library entry point; click handles argument parsing
    # and validation of the required --board-token-path option.
    start(board_token_path)
    # Exit code 0 signals success (propagated to the shell via sys.exit below).
    return 0


if __name__ == "__main__":
    sys.exit(main())  # pragma: no cover
| 24.055556 | 102 | 0.725173 | 60 | 433 | 4.933333 | 0.6 | 0.131757 | 0.212838 | 0.175676 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0.002703 | 0.145497 | 433 | 17 | 103 | 25.470588 | 0.797297 | 0.242494 | 0 | 0 | 0 | 0 | 0.205047 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.3 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
71414595796daa9ea1d1cb02aaa1e96629a157dd | 194 | py | Python | v1/schemas/auth.py | takotab/cloudrun-fastapi | 515145dc68f49b222cf582940368fdba4b6dcfd3 | [
"MIT"
] | 74 | 2020-02-06T18:20:36.000Z | 2021-11-28T11:46:02.000Z | v1/schemas/auth.py | takotab/cloudrun-fastapi | 515145dc68f49b222cf582940368fdba4b6dcfd3 | [
"MIT"
] | 13 | 2020-03-03T01:00:27.000Z | 2021-10-21T17:11:15.000Z | v1/schemas/auth.py | takotab/cloudrun-fastapi | 515145dc68f49b222cf582940368fdba4b6dcfd3 | [
"MIT"
] | 17 | 2020-02-08T13:20:21.000Z | 2021-11-02T10:19:16.000Z | from pydantic import UUID4, BaseModel, EmailStr
class Token(BaseModel):
    """Schema for an issued access token: the encoded token plus its type."""
    # Encoded token string returned to the client.
    access_token: str
    # Token scheme label (e.g. "bearer" — presumably; confirm against the
    # auth endpoint that builds this response).
    token_type: str
class TokenData(BaseModel):
    """Payload extracted from a decoded token; both fields default to None.

    NOTE(review): presumably identifies the authenticated user — confirm
    against the token-decoding code.
    """
    email: EmailStr = None
    id: UUID4 = None
| 16.166667 | 47 | 0.71134 | 24 | 194 | 5.666667 | 0.625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013158 | 0.216495 | 194 | 11 | 48 | 17.636364 | 0.881579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.142857 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8530f3a6fb147cef24829cc3993767b80c2ae23e | 209 | py | Python | test/img.py | milligan22963/Camera | 8d76d8366dc0dda755e1595f26b6b6caaf2f61a6 | [
"MIT"
] | null | null | null | test/img.py | milligan22963/Camera | 8d76d8366dc0dda755e1595f26b6b6caaf2f61a6 | [
"MIT"
] | null | null | null | test/img.py | milligan22963/Camera | 8d76d8366dc0dda755e1595f26b6b6caaf2f61a6 | [
"MIT"
] | null | null | null | from PIL import Image
def main(image_path="/home/daniel/Pictures/buddy.jpg"):
    """Open the image at *image_path* with PIL and return it.

    The path parameter defaults to the original hard-coded location so
    existing zero-argument callers keep working.

    Returns the opened ``Image`` object, or None when the file cannot be
    read (an "Error" message is printed in that case, preserving the
    original behavior).
    """
    try:
        # Image.open only reads the header lazily; failures surface as IOError.
        return Image.open(image_path)
    except IOError:
        print("Error")
        return None


if __name__ == "__main__":
    print("Starting main")
    main()
| 16.076923 | 58 | 0.641148 | 27 | 209 | 4.666667 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.205742 | 209 | 12 | 59 | 17.416667 | 0.759036 | 0 | 0 | 0 | 0 | 0 | 0.272727 | 0.148325 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0.1 | 0.1 | 0 | 0.2 | 0.2 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
85380f6a877ccb17d787241d534302810a979279 | 833 | py | Python | test_app/api/views.py | iamswaroopp/django-scaffold-generator | c9aa7269c3a3d9a618dbf41aac8e23649be64d48 | [
"MIT"
] | 6 | 2021-05-20T14:46:23.000Z | 2022-01-24T07:07:55.000Z | test_app/api/views.py | iamswaroopp/django-scaffold-generator | c9aa7269c3a3d9a618dbf41aac8e23649be64d48 | [
"MIT"
] | null | null | null | test_app/api/views.py | iamswaroopp/django-scaffold-generator | c9aa7269c3a3d9a618dbf41aac8e23649be64d48 | [
"MIT"
] | null | null | null |
from rest_framework import viewsets
from rest_framework.viewsets import ModelViewSet
from rest_framework.permissions import DjangoModelPermissions
from ..models import Blog
from .serializers import BlogSerializer
class BlogViewset(ModelViewSet):
    """CRUD API endpoints for Blog objects (DRF ModelViewSet)."""
    # Authorization follows Django's model-level permissions.
    permission_classes = [ DjangoModelPermissions ]
    serializer_class = BlogSerializer

    def get_queryset(self):
        # Expose every Blog row; no per-user filtering is applied here.
        return Blog.objects.all()
from rest_framework import viewsets
from rest_framework.viewsets import ModelViewSet
from rest_framework.permissions import DjangoModelPermissions
from ..models import Comment
from .serializers import CommentSerializer
class CommentViewset(ModelViewSet):
    """CRUD API endpoints for Comment objects (DRF ModelViewSet)."""
    # Authorization follows Django's model-level permissions.
    permission_classes = [ DjangoModelPermissions ]
    serializer_class = CommentSerializer

    def get_queryset(self):
        # Expose every Comment row; no per-user filtering is applied here.
        return Comment.objects.all()
| 19.372093 | 61 | 0.798319 | 84 | 833 | 7.77381 | 0.321429 | 0.073507 | 0.156202 | 0.070444 | 0.722818 | 0.649311 | 0.447167 | 0.447167 | 0.447167 | 0.447167 | 0 | 0 | 0.156062 | 833 | 42 | 62 | 19.833333 | 0.928876 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.5 | 0.1 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
854005657bfe33d29052924f10923a274658e20b | 770 | py | Python | SimCalorimetry/HcalZeroSuppressionProducers/python/NoHcalZeroSuppression_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | SimCalorimetry/HcalZeroSuppressionProducers/python/NoHcalZeroSuppression_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | SimCalorimetry/HcalZeroSuppressionProducers/python/NoHcalZeroSuppression_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | # Fragment to switch off HCAL zero suppression as an option
# by cmsDriver customisation
# to generate Unsuppressed digis, one has to set the following parameter:
# process.simHcalDigis.useConfigZSvalues = 1
# to generate suppressed digis, useConfigZSvalues should be set to 0
import FWCore.ParameterSet.Config as cms
def customise(process):
    """Switch off HCAL zero suppression on the given cmsDriver process.

    Forces every subdetector zero-suppression threshold to an impossibly
    low level and tells the digitizer to take thresholds from this
    configuration, so unsuppressed digis are produced.
    """
    # One threshold attribute per HCAL subdetector: barrel, endcap,
    # outer, forward.
    for threshold_attr in ("HBlevel", "HElevel", "HOlevel", "HFlevel"):
        setattr(process.simHcalDigis, threshold_attr, -999)
    # Use the ZS values configured above instead of the database defaults.
    process.simHcalDigis.useConfigZSvalues = 1
    return(process)
| 36.666667 | 99 | 0.787013 | 83 | 770 | 7.289157 | 0.614458 | 0.219835 | 0.145455 | 0.122314 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022624 | 0.138961 | 770 | 20 | 100 | 38.5 | 0.889894 | 0.58961 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.125 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
85555d370982e1458f4b4957aa368e80824c0d07 | 8,353 | py | Python | automation/models.py | leonolan2020/phoenix | b5956a7003e548f01255cbd5d0d76cfd0ac77a81 | [
"MIT"
] | 1 | 2020-09-19T21:56:40.000Z | 2020-09-19T21:56:40.000Z | automation/models.py | leonolan2020/phoenix | b5956a7003e548f01255cbd5d0d76cfd0ac77a81 | [
"MIT"
] | null | null | null | automation/models.py | leonolan2020/phoenix | b5956a7003e548f01255cbd5d0d76cfd0ac77a81 | [
"MIT"
] | 5 | 2020-09-18T18:53:03.000Z | 2020-10-21T14:42:00.000Z | from app.persian import PersianCalendar
from django.db import models
from .enums import UnitNameEnum,ProductRequestStatusEnum,LetterStatusEnum,AgentRoleEnum
from app.enums import ColorEnum,IconsEnum,EmployeeEnum,DegreeLevelEnum
from django.shortcuts import reverse
from app.settings import ADMIN_URL
from django.utils.translation import gettext as _
from .apps import APP_NAME
from app.models import OurWork
class WorkUnit(models.Model):
    """An organizational unit (e.g. accounting) with display styling and its
    assigned employees."""
    title=models.CharField(_("title"),choices=UnitNameEnum.choices,default=UnitNameEnum.ACCOUNTING, max_length=50)
    # Icon/color choices drive how the unit is rendered in the UI.
    icon=models.CharField(_("icon"),choices=IconsEnum.choices,default=IconsEnum.link, max_length=50)
    color=models.CharField(_("color"),choices=ColorEnum.choices,default=ColorEnum.PRIMARY, max_length=50)
    employees=models.ManyToManyField("Employee", verbose_name=_("نیروی انسانی"),blank=True)
    description=models.CharField(_("description"), max_length=500,null=True,blank=True)

    class Meta:
        verbose_name = _("WorkUnit")
        verbose_name_plural = _("WorkUnits - واحد های سازمانی")

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        # Public detail page, resolved through the app's URL namespace.
        return reverse("automation:work_unit", kwargs={"work_unit_id": self.pk})

    def get_edit_url(self):
        # Direct link into the Django admin change page for this instance.
        return f'{ADMIN_URL}{APP_NAME}/workunit/{self.pk}/change/'
class ProductRequestSignature(models.Model):
    """A single signature on a product (purchase) request, recording the
    signer and the workflow status they assigned."""
    signature=models.ForeignKey("app.Signature", verbose_name=_("signatures"), on_delete=models.PROTECT)
    status=models.CharField(_("status"),choices=ProductRequestStatusEnum.choices,default=ProductRequestStatusEnum.REQUESTED, max_length=50)

    def get_status_tag(self):
        """Return an HTML <span> badge for the current status, colored with a
        Bootstrap contextual class ('primary' when no branch matches)."""
        # status holds a single enum value, so at most one branch below
        # overrides the default color.
        color='primary'
        if self.status==ProductRequestStatusEnum.ACCEPTED:
            color='success'
        if self.status==ProductRequestStatusEnum.CANCELED:
            color='secondary'
        if self.status==ProductRequestStatusEnum.COMPLETED:
            color='primary'
        if self.status==ProductRequestStatusEnum.DENIED:
            color='danger'
        if self.status==ProductRequestStatusEnum.PROCCESSING:
            color='light'
        if self.status==ProductRequestStatusEnum.IN_PROGRESS:
            color='warning'
        if self.status==ProductRequestStatusEnum.REQUESTED:
            color='info'
        return f'<span class="badge badge-{color}">{self.status}</span>'

    class Meta:
        verbose_name = _("ProductRequestSignature")
        verbose_name_plural = _("ProductRequestSignatures -امضاهای درخواست های خرید")

    def __str__(self):
        return f'{self.signature.profile.name()} : {self.status}'

    def get_absolute_url(self):
        # NOTE(review): this URL name is unnamespaced, unlike WorkUnit's
        # "automation:..." pattern — confirm it resolves.
        return reverse("ProductRequestSignature_detail", kwargs={"pk": self.pk})
class Employee(models.Model):
    """An employee: an optional app.Profile plus role and education details
    used by the automation app."""
    profile=models.ForeignKey("app.Profile",related_name='automationprofile', verbose_name=_("profile"),null=True,blank=True, on_delete=models.PROTECT)
    role=models.CharField(_("نقش"),choices=EmployeeEnum.choices,default=EmployeeEnum.DEFAULT, max_length=50)
    degree=models.CharField(_("مدرک"),choices=DegreeLevelEnum.choices,default=DegreeLevelEnum.KARSHENASI, max_length=50)
    major=models.CharField(_("رشته تحصیلی"),null=True,blank=True, max_length=50)
    introducer=models.CharField(_("معرف"),null=True,blank=True, max_length=50)

    def __str__(self):
        # Fix: delegate to name() so a null profile (the FK is nullable)
        # yields the placeholder text instead of raising AttributeError.
        return self.name()

    def name(self):
        """Return the profile's display name, or a placeholder when no
        profile is attached."""
        if self.profile is not None:
            return self.profile.name()
        return "پروفایل خالی"

    class Meta:
        verbose_name = _("Employee")
        verbose_name_plural = _("کارمندان")

    def get_absolute_url(self):
        # NOTE(review): raises AttributeError when profile is None — callers
        # presumably only link employees that have a profile; confirm.
        return reverse('app:profile',kwargs={'profile_id':self.profile.pk})

    def get_edit_url(self):
        # Returns None when no profile is attached (same as the original's
        # implicit fall-through, made explicit).
        if self.profile is not None:
            return self.profile.get_edit_url()
        return None
class ProductRequest(models.Model):
    """A purchase request raised by a work unit for a quantity of a product,
    tracked through a status workflow with collected signatures."""
    employee=models.ForeignKey("Employee", verbose_name=_("پرسنل"),null=True,blank=True, on_delete=models.SET_NULL)
    work_unit=models.ForeignKey("WorkUnit", verbose_name=_("واحد سازمانی"), on_delete=models.PROTECT)
    product=models.ForeignKey("market.Product", verbose_name=_("کالا"), on_delete=models.PROTECT)
    # Free-text unit label (e.g. pieces, boxes) for the requested quantity.
    product_unit=models.CharField(_("واحد"), max_length=50)
    quantity=models.IntegerField(_("تعداد"))
    date_added=models.DateTimeField(_("date_added"), auto_now=False, auto_now_add=True)
    status=models.CharField(_("وضعیت"),choices=ProductRequestStatusEnum.choices,default=ProductRequestStatusEnum.REQUESTED, max_length=50)
    purchase_agent=models.ForeignKey("PurchaseAgent", verbose_name=_("مسئول خرید"), on_delete=models.PROTECT,null=True,blank=True)
    signatures=models.ManyToManyField("ProductRequestSignature", verbose_name=_("امضا ها"),blank=True)

    class Meta:
        verbose_name = _("ProductRequest")
        verbose_name_plural = _("ProductRequests -درخواست های خرید")

    def get_status_tag(self):
        """Return an HTML <span> badge for the current status, colored with a
        Bootstrap contextual class ('primary' when no branch matches)."""
        # status holds a single enum value, so at most one branch below
        # overrides the default color.
        color='primary'
        if self.status==ProductRequestStatusEnum.ACCEPTED:
            color='success'
        if self.status==ProductRequestStatusEnum.CANCELED:
            color='secondary'
        if self.status==ProductRequestStatusEnum.COMPLETED:
            color='primary'
        if self.status==ProductRequestStatusEnum.DENIED:
            color='danger'
        if self.status==ProductRequestStatusEnum.PROCCESSING:
            color='light'
        if self.status==ProductRequestStatusEnum.IN_PROGRESS:
            color='warning'
        if self.status==ProductRequestStatusEnum.REQUESTED:
            color='info'
        return f'<span class="badge badge-{color}">{self.status}</span>'

    def __str__(self):
        return f'{self.work_unit.title} / {self.product.name} : {self.quantity} {self.product_unit}'

    def get_edit_url(self):
        # Direct link into the Django admin change page for this instance.
        return ADMIN_URL+APP_NAME+'/productrequest/'+str(self.pk)+'/change/'

    def get_absolute_url(self):
        return reverse("automation:product_request", kwargs={"product_request_id": self.pk})
class PurchaseAgent(models.Model):
    """A profile responsible for handling purchases, with a numeric rank."""
    profile=models.ForeignKey("app.Profile", verbose_name=_("profile"), on_delete=models.CASCADE)
    rank=models.IntegerField(_("rank"),default=0)

    class Meta:
        verbose_name = _("PurchaseAgent")
        verbose_name_plural = _("PurchaseAgents - مسئول های خرید")

    def __str__(self):
        return f'{self.profile.name()} ({self.rank})'

    def get_absolute_url(self):
        # NOTE(review): unnamespaced URL name, unlike WorkUnit's
        # "automation:..." pattern — confirm it resolves.
        return reverse("PurchaseAgent_detail", kwargs={"pk": self.pk})
class LetterSignature(models.Model):
    """A single signature on a Letter, recording the signer and the workflow
    status they assigned."""
    signature=models.ForeignKey("app.Signature", verbose_name=_("signatures"), on_delete=models.PROTECT)
    status=models.CharField(_("status"),choices=LetterStatusEnum.choices,default=LetterStatusEnum.DRAFT, max_length=50)

    class Meta:
        verbose_name = _("LetterSignature")
        verbose_name_plural = _("LetterSignatures - امضا های نامه ها")

    def __str__(self):
        # Fix: the model has no `name` field (the original `self.name` raised
        # AttributeError); mirror ProductRequestSignature's representation.
        return f'{self.signature.profile.name()} : {self.status}'

    def get_absolute_url(self):
        return reverse("LetterSignature_detail", kwargs={"pk": self.pk})
class Letter(models.Model):
    """An internal letter from a sender profile to a work unit, with its
    collected signatures."""
    sender=models.ForeignKey("app.Profile", verbose_name=_("فرستنده"), on_delete=models.CASCADE)
    work_unit=models.ForeignKey("WorkUnit", verbose_name=_("گیرنده"), on_delete=models.CASCADE)
    title=models.CharField(_("title"), max_length=50)
    body=models.CharField(_("body"), max_length=50)
    date_added=models.DateTimeField(_("date_added"), auto_now=False, auto_now_add=True)
    signatures=models.ManyToManyField("LetterSignature", verbose_name=_("signatures"),blank=True)

    class Meta:
        verbose_name = _("Letter")
        verbose_name_plural = _("Lettes - نامه ها")

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        # NOTE(review): unnamespaced URL name, unlike WorkUnit's
        # "automation:..." pattern — confirm it resolves.
        return reverse("Letter_detail", kwargs={"pk": self.pk})
class Project(OurWork):
    """A project (extends app.OurWork) tied to work units and warehouses."""
    work_units=models.ManyToManyField("WorkUnit", verbose_name=_("work_units"),blank=True)
    warehouses=models.ManyToManyField("market.WareHouse", verbose_name=_("warehouses"),blank=True)

    class Meta:
        verbose_name = _("Project")
        verbose_name_plural = _("Projects - پروژه ها")

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        return reverse("automation:project", kwargs={"project_id": self.pk})

    def get_edit_url(self):
        # Direct link into the Django admin change page for this instance.
        return f'{ADMIN_URL}{APP_NAME}/project/{self.pk}/change/'
| 43.963158 | 151 | 0.709565 | 954 | 8,353 | 5.985325 | 0.179245 | 0.05972 | 0.029422 | 0.088266 | 0.494046 | 0.480736 | 0.424168 | 0.343958 | 0.326445 | 0.296497 | 0 | 0.004297 | 0.164252 | 8,353 | 190 | 152 | 43.963158 | 0.813637 | 0 | 0 | 0.477124 | 0 | 0.006536 | 0.175844 | 0.046325 | 0 | 0 | 0 | 0 | 0 | 1 | 0.150327 | false | 0 | 0.058824 | 0.124183 | 0.686275 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
855639cc0a04b3c6e819ec29c78c848da7561279 | 808 | py | Python | ok2_backend/KNSQueries/migrations/0002_auto_20210123_1519.py | moshe742/ok2-backend | 03243a296c3ef753dee2173b589a2d01bd36d2e0 | [
"MIT"
] | null | null | null | ok2_backend/KNSQueries/migrations/0002_auto_20210123_1519.py | moshe742/ok2-backend | 03243a296c3ef753dee2173b589a2d01bd36d2e0 | [
"MIT"
] | null | null | null | ok2_backend/KNSQueries/migrations/0002_auto_20210123_1519.py | moshe742/ok2-backend | 03243a296c3ef753dee2173b589a2d01bd36d2e0 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.3 on 2021-01-23 15:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('KNSQueries', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='person',
name='is_current',
),
migrations.AlterField(
model_name='knsquery',
name='query_id',
field=models.IntegerField(unique=True),
),
migrations.AlterField(
model_name='ministry',
name='ministry_id',
field=models.IntegerField(unique=True),
),
migrations.AlterField(
model_name='person',
name='person_id',
field=models.IntegerField(unique=True),
),
]
| 24.484848 | 51 | 0.553218 | 74 | 808 | 5.918919 | 0.513514 | 0.082192 | 0.171233 | 0.19863 | 0.372146 | 0.372146 | 0.292237 | 0.292237 | 0.292237 | 0.292237 | 0 | 0.03525 | 0.332921 | 808 | 32 | 52 | 25.25 | 0.777365 | 0.055693 | 0 | 0.461538 | 1 | 0 | 0.115637 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.038462 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
855e12d4846d6066ca80faa302d770e52bf51d3f | 1,550 | py | Python | apps/common/behaviors/uploadable.py | yudame/prakti-api | 9d00c1dd155f80839248e611e0d63b1c70041758 | [
"Unlicense"
] | null | null | null | apps/common/behaviors/uploadable.py | yudame/prakti-api | 9d00c1dd155f80839248e611e0d63b1c70041758 | [
"Unlicense"
] | null | null | null | apps/common/behaviors/uploadable.py | yudame/prakti-api | 9d00c1dd155f80839248e611e0d63b1c70041758 | [
"Unlicense"
] | null | null | null | import json
import uuid
from jsonfield import JSONField
from django.db import models
class Uploadable(models.Model):
    """Abstract behavior mixin for models backed by an uploaded file: a URL
    plus a JSON metadata blob (type/name/ext/etc keys)."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    url = models.URLField(default="")
    meta_data = JSONField(blank=True, null=True)

    class Meta:
        abstract = True

    # MODEL PROPERTIES
    def _meta_as_dict(self):
        """Return meta_data as a dict, decoding a JSON string in place when
        needed; returns {} for None or non-dict payloads.

        Centralizes the decode that every property below previously repeated,
        and replaces the original bare ``except`` (and the None crashes in
        ``name``/``link_title``) with an explicit non-dict guard.
        """
        if self.meta_data and isinstance(self.meta_data, str):
            self.meta_data = json.loads(self.meta_data)
        return self.meta_data if isinstance(self.meta_data, dict) else {}

    @property
    def file_type(self):
        """MIME/file type stored under the 'type' key, or ''."""
        return self._meta_as_dict().get('type', "")

    @property
    def name(self):
        """Original file name stored under the 'name' key, or ''."""
        return self._meta_as_dict().get('name', "")

    @property
    def file_extension(self):
        """File extension stored under the 'ext' key, or ''."""
        return self._meta_as_dict().get('ext', "")

    @property
    def link_title(self):
        """Human-readable title for a download link: the file name if known,
        else the upper-cased 'etc'/'type' value, with the upper-cased
        extension appended when available."""
        meta = self._meta_as_dict()
        if self.name:
            title = self.name
        elif 'etc' in meta:
            title = (meta['etc'] or "").upper()
        else:
            title = (meta['type'] or "").upper() if 'type' in meta else ""
        if 'ext' in meta:
            title = title + " .%s" % (meta['ext'] or "").upper()
        return title
| 30.392157 | 79 | 0.601935 | 205 | 1,550 | 4.409756 | 0.263415 | 0.221239 | 0.318584 | 0.09292 | 0.457965 | 0.372788 | 0.372788 | 0.308628 | 0.308628 | 0.308628 | 0 | 0.000894 | 0.278065 | 1,550 | 50 | 80 | 31 | 0.806971 | 0.010323 | 0 | 0.25 | 0 | 0 | 0.022846 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.1 | 0 | 0.45 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
856869ea9b6dabe9d40a2462ee1d13b5b5eaa0b9 | 708 | py | Python | lib/cli_commands/validate_classifier.py | antonsuba/ids-test-environment-sdn | 0292931cc7d9897eea7aae8c3c23a690fde7d862 | [
"MIT"
] | 1 | 2020-03-11T18:47:24.000Z | 2020-03-11T18:47:24.000Z | lib/cli_commands/validate_classifier.py | antonsuba/ids-test-environment-sdn | 0292931cc7d9897eea7aae8c3c23a690fde7d862 | [
"MIT"
] | null | null | null | lib/cli_commands/validate_classifier.py | antonsuba/ids-test-environment-sdn | 0292931cc7d9897eea7aae8c3c23a690fde7d862 | [
"MIT"
] | 2 | 2018-11-26T11:31:37.000Z | 2019-02-20T14:23:44.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import yaml
import inspect
from importlib import import_module
# Resolve the directory containing this file, then locate the shared YAML
# configuration relative to it.
DIRNAME = os.path.dirname(os.path.abspath(inspect.stack()[0][1]))
CONFIG = os.path.join(DIRNAME, '../../config/config.yml')

# Load only the 'cli' section used by this command module.
# NOTE(review): yaml.load without an explicit Loader is unsafe on untrusted
# input; this reads a local project file, but consider yaml.safe_load.
with open(CONFIG, 'r') as config_file:
    cfg = yaml.load(config_file).get('cli')
class ValidateClassifierCommand(object):
    "Command driver for validating IDS classifiers"
    # CLI keyword that dispatches to this command.
    trigger = 'validate'

    def run(self, args):
        # This command accepts no arguments; warn and bail out if any are
        # given. (Python 2 print statement — the module targets Python 2.)
        if args:
            print 'No available command args for "validate"'
            return
        # Import the validation module named in the config's
        # 'validation-module' entry and run its entry point.
        validation_module = import_module(
            'ml_ids.%s' % cfg['validation-module'])
        validation_module.main()
| 24.413793 | 65 | 0.659605 | 90 | 708 | 5.111111 | 0.622222 | 0.03913 | 0.056522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005348 | 0.207627 | 708 | 28 | 66 | 25.285714 | 0.814617 | 0.053672 | 0 | 0 | 0 | 0 | 0.218563 | 0.034431 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.315789 | null | null | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
857a1a342199e6e969244060bb3ca1b798f9735f | 1,305 | py | Python | implementations/tesla/bin/responsehandlers.py | djsincla/SplunkModularInputsPythonFramework | 1dd215214f3d2644cb358e41f4105fe40cff5393 | [
"Apache-2.0"
] | 3 | 2020-08-31T00:59:26.000Z | 2021-10-19T22:01:00.000Z | implementations/tesla/bin/responsehandlers.py | djsincla/SplunkModularInputsPythonFramework | 1dd215214f3d2644cb358e41f4105fe40cff5393 | [
"Apache-2.0"
] | null | null | null | implementations/tesla/bin/responsehandlers.py | djsincla/SplunkModularInputsPythonFramework | 1dd215214f3d2644cb358e41f4105fe40cff5393 | [
"Apache-2.0"
] | 1 | 2020-01-09T03:55:51.000Z | 2020-01-09T03:55:51.000Z | #add your custom response handler class to this module
import json
import datetime
#the default handler , does nothing , just passes the raw output directly to STDOUT
class DefaultResponseHandler:
    """The default handler: passes the raw response output straight to
    STDOUT as a Splunk XML stream, with no transformation."""

    def __init__(self, **args):
        # Handler configuration arguments are accepted for interface
        # parity but unused.
        pass

    def __call__(self, response_object, raw_response_output, response_type, req_args, endpoint):
        # Emit the payload unchanged; print_xml_stream wraps it in the
        # <stream>/<event> envelope.
        print_xml_stream(raw_response_output)
class MyCustomTeslaHandler:
    """Example custom handler: tweaks the request arguments for the next
    poll, then streams the raw output unchanged."""

    def __init__(self, **args):
        pass

    def __call__(self, response_object, raw_response_output, response_type, req_args, endpoint):
        # Mutating req_args changes the parameters the caller uses for the
        # next request.
        req_args["data"] = 'What does the fox say'
        print_xml_stream(raw_response_output)
#HELPER FUNCTIONS
# prints XML stream
def print_xml_stream(s):
    """Write *s* to STDOUT wrapped in the Splunk modular-input stream XML.

    The payload is XML-escaped via encodeXMLText so it cannot break out of
    the <data> element.
    """
    # Parentheses keep this valid on both Python 2 and Python 3.
    print("<stream><event unbroken=\"1\"><data>%s</data><done/></event></stream>" % encodeXMLText(s))
def encodeXMLText(text):
    """Escape the five XML special characters in *text* and strip newlines.

    '&' must be replaced first so the ampersands introduced by the other
    substitutions are not themselves double-escaped. (The previous version
    had its entity strings corrupted into the literal characters, making
    the replaces no-ops.)
    """
    text = text.replace("&", "&amp;")
    text = text.replace("\"", "&quot;")
    text = text.replace("'", "&apos;")
    text = text.replace("<", "&lt;")
    text = text.replace(">", "&gt;")
    # Events are single-line; drop embedded newlines entirely.
    text = text.replace("\n", "")
    return text
| 31.829268 | 104 | 0.564751 | 138 | 1,305 | 5.072464 | 0.427536 | 0.08 | 0.128571 | 0.042857 | 0.34 | 0.34 | 0.251429 | 0.251429 | 0.251429 | 0.251429 | 0 | 0.001124 | 0.318008 | 1,305 | 40 | 105 | 32.625 | 0.785393 | 0.129502 | 0 | 0.347826 | 0 | 0 | 0.10424 | 0.035336 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.086957 | 0.086957 | null | null | 0.173913 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
858a0cf0d14ee703329ac16399a65e3ed5de626f | 5,007 | py | Python | make_json.py | DewMaple/head_box | 27ac90511344bfa75b340d1db960365e9eb148c7 | [
"Apache-2.0",
"MIT"
] | null | null | null | make_json.py | DewMaple/head_box | 27ac90511344bfa75b340d1db960365e9eb148c7 | [
"Apache-2.0",
"MIT"
] | null | null | null | make_json.py | DewMaple/head_box | 27ac90511344bfa75b340d1db960365e9eb148c7 | [
"Apache-2.0",
"MIT"
] | null | null | null | # A tool for hand labelling images
# Generates an IDL file
# pass in the directory where you store your images and a filename, then select the points on the images
# every time you hit next a line is generated
# the clear button removes are selected points on the current image
# when all files in the directory are processed, the idl file is written out
# ex: python make_idl.py train640x480 train.idl
# altered to output json
# ex: python make_json.py train640x480 train.json
# added button to skip an image
# enforce convention that rects are in top left, bottom right order
# correct name of image path object
import json
import sys
import imageio
import matplotlib as mpl
import matplotlib.image as mpimg
import matplotlib.patches as patches
import matplotlib.pyplot as plt
mpl.rcParams['toolbar'] = 'None'
from matplotlib.widgets import Button
from os import listdir
from os.path import isfile, join
json_images = []  # accumulated {image_path, rects} records, dumped at the end
top_corners = []  # first-click corners of rectangles on the current image
bottom_corners = []  # second-click corners paired with top_corners
patchCache = []  # the rectangles that get drawn on the image, stored so they can be removed in an orderly fashion
def removeAllPatches():
    """Remove every rectangle currently drawn on the axes and reset the cache."""
    for patch in patchCache:
        patch.remove()  # detach the artist from the matplotlib axes
    patchCache[:] = []  # clear in place so other references stay valid
def skip(event):  # called when the skip button is hit
    """Advance to the next image without recording any rectangles; when no
    images remain, write the collected JSON and close the window."""
    global filename
    if len(onlyfiles) == 0:
        outfile.write(json.dumps(json_images, indent=1))
        plt.close()
    else:
        filename = path + "/" + onlyfiles.pop()
        image = mpimg.imread(filename)
        imshow_obj.set_data(image)
        # Reset the per-image selection state.
        top_corners[:] = []
        bottom_corners[:] = []
        removeAllPatches()
def next(event):  # called when the next button is hit
    """Record the rectangles drawn on the current image, append the record
    to the JSON output, and advance to the next image (writing the final
    file and closing the window when none remain).

    NOTE(review): the name shadows the builtin next(); left unchanged
    because the button callback below is wired to this name.
    """
    global filename
    global json_images
    rects = []
    one_decimal = "{0:0.1f}"
    for i in range(len(top_corners)):
        # Round both corners to one decimal place.
        x1 = float(one_decimal.format(top_corners[i][0]))
        x2 = float(one_decimal.format(bottom_corners[i][0]))
        y1 = float(one_decimal.format(top_corners[i][1]))
        y2 = float(one_decimal.format(bottom_corners[i][1]))
        # enforce x1,y1 = top left, x2,y2 = bottom right
        tlx = min(x1, x2)
        tly = min(y1, y2)
        brx = max(x1, x2)
        bry = max(y1, y2)
        bbox = dict([("x1", tlx), ("y1", tly), ("x2", brx), ("y2", bry)])
        rects.append(bbox)
    json_image = dict([("image_path", filename), ("rects", rects)])
    json_images.append(json_image)
    # Incrementally persist progress in case the session is interrupted.
    progress_outfile.write(json.dumps(json_image, indent=1))
    if len(onlyfiles) == 0:
        outfile.write(json.dumps(json_images, indent=1))
        plt.close()
    else:
        filename = path + "/" + onlyfiles.pop()
        image = mpimg.imread(filename)
        imshow_obj.set_data(image)
        top_corners[:] = []
        bottom_corners[:] = []
        removeAllPatches()
def clear(event):  # called when the clear button is hit
    """Discard all rectangles selected on the current image."""
    top_corners[:] = []
    bottom_corners[:] = []
    removeAllPatches()
def onclick(event):  # called when anywhere inside the window is clicked
    """Collect rectangle corners: the first click sets one corner, the
    second click completes the rectangle and draws it on the axes."""
    # Ignore clicks at the very edge of the window / on the button row.
    if event.xdata > 1 and event.ydata > 1:
        if len(top_corners) > len(bottom_corners):
            # Second click: close the rectangle and draw it.
            bottom_corners.append([event.xdata, event.ydata])
            patchCache.append(patches.Rectangle((top_corners[-1][0], top_corners[-1][1])
                                                , bottom_corners[-1][0] - top_corners[-1][0],
                                                bottom_corners[-1][1] - top_corners[-1][1],
                                                hatch='/', fill=False, edgecolor="red"))
            ax.add_patch(patchCache[-1])
            plt.draw()
        else:
            # First click: remember the starting corner.
            top_corners.append([event.xdata, event.ydata])
def undo(event):  # called when the undo button is hit
    """Remove the most recently completed rectangle.

    Only acts when the last rectangle was fully drawn (both corners
    recorded) AND at least one rectangle exists; previously, pressing
    Undo before any selection raised IndexError by popping empty lists.
    """
    if bottom_corners and len(top_corners) == len(bottom_corners):
        bottom_corners.pop()
        top_corners.pop()
        to_remove = patchCache.pop()
        to_remove.remove()
ax = plt.gca()
# get our files for processing
if len(sys.argv) < 3:
    print "Too few params, try something like: python make_json.py train640x480 train.json"
    exit()
path = sys.argv[1]
outfile_name = sys.argv[2]
outfile = open(outfile_name, 'w')
# Work file appended after every image, so progress survives a crash.
progress_outfile = open(outfile_name + "_work", 'w')
onlyfiles = [f for f in listdir(path) if isfile(join(path, f))]
# Show the first image.
filename = path + "/" + onlyfiles.pop()
image = imageio.imread(filename)
imshow_obj = ax.imshow(image)
# plt.axis("off")
fig = plt.gcf()
# fig.set_size_inches(forward=True)
fig.canvas.mpl_connect('button_press_event', onclick)
# add the buttons to the bottom of the window
axundo = plt.axes([0.59, 0.01, 0.1, 0.075])
axnext = plt.axes([0.7, 0.01, 0.1, 0.075])
axclear = plt.axes([0.81, 0.01, 0.1, 0.075])
axskip = plt.axes([0.92, 0.01, 0.1, 0.075])
bundo = Button(axundo, 'Undo')
bundo.on_clicked(undo)
bnext = Button(axnext, 'Next')
bnext.on_clicked(next)
bclear = Button(axclear, 'Clear')
bclear.on_clicked(clear)
bskip = Button(axskip, 'Skip')
bskip.on_clicked(skip)
# plt.show() blocks until the window closes; then flush and finish.
plt.show()
outfile.close()
progress_outfile.close()
print "finished"
| 29.627219 | 114 | 0.641102 | 707 | 5,007 | 4.44413 | 0.302687 | 0.04774 | 0.038192 | 0.029281 | 0.295672 | 0.262572 | 0.206238 | 0.140038 | 0.140038 | 0.112031 | 0 | 0.029964 | 0.233473 | 5,007 | 168 | 115 | 29.803571 | 0.788692 | 0.215099 | 0 | 0.241071 | 1 | 0 | 0.04585 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.089286 | null | null | 0.017857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
85a5679ea55a61eae86d19df12cb920b82adecd1 | 2,021 | py | Python | note37/attr_test2.py | icexmoon/python-learning-notes | 838c91d896404290b89992b6517be1b6a79df41f | [
"MIT"
] | null | null | null | note37/attr_test2.py | icexmoon/python-learning-notes | 838c91d896404290b89992b6517be1b6a79df41f | [
"MIT"
] | null | null | null | note37/attr_test2.py | icexmoon/python-learning-notes | 838c91d896404290b89992b6517be1b6a79df41f | [
"MIT"
] | null | null | null | import abc
from typing import ValuesView
class AttributeProxy:
    """Data descriptor that proxies an attribute, storing its value in the
    owning instance's __dict__ under the configured name."""

    def __init__(self, name) -> None:
        # Key used in instance.__dict__ for the managed value.
        self.name = name

    def __get__(self, instance, owner):
        return instance.__dict__[self.name]

    def __set__(self, instance, value):
        instance.__dict__[self.name] = value
class ValidatableAttr(abc.ABC, AttributeProxy):
    """Abstract descriptor that validates (and possibly normalizes) a value
    before storing it via AttributeProxy.__set__."""

    @abc.abstractmethod
    def validate(self, value):
        # Subclasses return the (possibly transformed) value, or raise.
        pass

    def __set__(self, instance, value):
        value = self.validate(value)
        super().__set__(instance, value)
class PositiveNumber(ValidatableAttr):
    """Validator descriptor accepting only values strictly greater than zero."""

    def validate(self, value):
        # Guard clause: reject non-positive values up front.
        if not value > 0:
            raise ValueError('value must > 0')
        return value
class TextNotEmpty(ValidatableAttr):
    """Validator descriptor: the value, stripped of surrounding whitespace,
    must be a non-empty string; the stripped text is what gets stored."""

    def validate(self, value):
        stripped = str(value).strip()
        if not stripped:
            raise ValueError('text must not empty string')
        return stripped
class Order:
    """Order line item; class-level descriptors enforce positive numbers
    and a non-empty description at assignment time."""

    quantity = PositiveNumber('quantity')
    price = PositiveNumber('price')
    des = TextNotEmpty('des')

    def __init__(self, quantity, price, des) -> None:
        # Each assignment runs the matching descriptor's validation.
        self.quantity = quantity
        self.price = price
        self.des = des

    def total(self):
        # Total cost of the line item.
        return self.quantity*self.price
order = Order(1.5, 5, 'banana')
print(order.total())
print(vars(order))
# The empty description below fails TextNotEmpty validation, producing the
# ValueError traceback reproduced in the comments that follow.
order2 = Order(2, 3, '')
print(order2.total())
# 7.5
# {'quantity': 1.5, 'price': 5, 'des': 'banana'}
# Traceback (most recent call last):
# File "D:\workspace\python\python-learning-notes\note37\test.py", line 60, in <module>
# order2 = Order(2, 3, '')
# File "D:\workspace\python\python-learning-notes\note37\test.py", line 51, in __init__
# self.des = des
# File "D:\workspace\python\python-learning-notes\note37\test.py", line 22, in __set__
# value = self.validate(value)
# File "D:\workspace\python\python-learning-notes\note37\test.py", line 40, in validate
# raise ValueError('text must not empty string')
# ValueError: text must not empty string | 27.684932 | 89 | 0.644236 | 252 | 2,021 | 5.007937 | 0.293651 | 0.025357 | 0.044374 | 0.063391 | 0.395404 | 0.303487 | 0.27813 | 0.174326 | 0.174326 | 0.174326 | 0 | 0.021767 | 0.227115 | 2,021 | 73 | 90 | 27.684932 | 0.786172 | 0.300346 | 0 | 0.204545 | 0 | 0 | 0.044223 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.204545 | false | 0.022727 | 0.045455 | 0.045455 | 0.522727 | 0.068182 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
85b0727bf78ced43482d2f2348ea8955c8ae7a71 | 274 | py | Python | set.py | mghelton/snmp_research | 5cca96e94506e3aa10df8d45c3db4649071eee22 | [
"MIT"
] | null | null | null | set.py | mghelton/snmp_research | 5cca96e94506e3aa10df8d45c3db4649071eee22 | [
"MIT"
] | null | null | null | set.py | mghelton/snmp_research | 5cca96e94506e3aa10df8d45c3db4649071eee22 | [
"MIT"
] | null | null | null | from pysnmp.hlapi import *
g = setCmd(SnmpEngine(),
CommunityData('public'),
UdpTransportTarget(('192.168.61.6', 161)),
ContextData(),
ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysName', 0),'test'))
gen = next(g)
print(gen[3])
| 30.444444 | 74 | 0.583942 | 29 | 274 | 5.517241 | 0.931034 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.072115 | 0.240876 | 274 | 8 | 75 | 34.25 | 0.697115 | 0 | 0 | 0 | 0 | 0 | 0.142336 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0.125 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
85b577080a0e6e1949e13130cd725e21728ff5fa | 1,464 | bzl | Python | specs2/specs2.bzl | ittaym/rules_scala | 4666a7e4efa6f7c9eaedf10863af23ce184111d3 | [
"Apache-2.0"
] | 2 | 2019-06-17T19:20:57.000Z | 2020-04-22T15:39:04.000Z | specs2/specs2.bzl | ittaym/rules_scala | 4666a7e4efa6f7c9eaedf10863af23ce184111d3 | [
"Apache-2.0"
] | 1 | 2017-06-28T15:43:25.000Z | 2017-08-13T15:56:34.000Z | specs2/specs2.bzl | ittaym/rules_scala | 4666a7e4efa6f7c9eaedf10863af23ce184111d3 | [
"Apache-2.0"
] | 3 | 2017-07-12T07:04:11.000Z | 2018-12-11T12:31:17.000Z | load("//scala:scala_cross_version.bzl",
"scala_mvn_artifact",
)
def specs2_version():
    """Single source of truth for the pinned specs2 artifact version."""
    return "3.8.8"
def specs2_repositories():
    """Declares the maven_jar repositories for specs2 and its scalaz
    runtime dependencies, each pinned by version and jar sha1."""
    native.maven_jar(
        name = "io_bazel_rules_scala_org_specs2_specs2_core",
        artifact = scala_mvn_artifact("org.specs2:specs2-core:" + specs2_version()),
        sha1 = "495bed00c73483f4f5f43945fde63c615d03e637",
    )
    native.maven_jar(
        name = "io_bazel_rules_scala_org_specs2_specs2_common",
        artifact = scala_mvn_artifact("org.specs2:specs2-common:" + specs2_version()),
        sha1 = "15bc009eaae3a574796c0f558d8696b57ae903c3",
    )
    native.maven_jar(
        name = "io_bazel_rules_scala_org_specs2_specs2_matcher",
        artifact = scala_mvn_artifact("org.specs2:specs2-matcher:" + specs2_version()),
        sha1 = "d2e967737abef7421e47b8994a8c92784e624d62",
    )
    # scalaz is pinned independently of specs2_version().
    native.maven_jar(
        name = "io_bazel_rules_scala_org_scalaz_scalaz_effect",
        artifact = scala_mvn_artifact("org.scalaz:scalaz-effect:7.2.7"),
        sha1 = "824bbb83da12224b3537c354c51eb3da72c435b5",
    )
    native.maven_jar(
        name = "io_bazel_rules_scala_org_scalaz_scalaz_core",
        artifact = scala_mvn_artifact("org.scalaz:scalaz-core:7.2.7"),
        sha1 = "ebf85118d0bf4ce18acebf1d8475ee7deb7f19f1",
    )
    # Stable alias so downstream targets can depend on the bundled specs2
    # target without hard-coding the repository label.
    native.bind(name = 'io_bazel_rules_scala/dependency/specs2/specs2', actual = "@io_bazel_rules_scala//specs2:specs2")
def specs2_dependencies():
    """Labels that test targets should add to deps in order to use specs2."""
    return ["//external:io_bazel_rules_scala/dependency/specs2/specs2"]
| 34.046512 | 118 | 0.741803 | 170 | 1,464 | 5.982353 | 0.229412 | 0.106195 | 0.094395 | 0.133727 | 0.526057 | 0.522124 | 0.514258 | 0.245821 | 0.245821 | 0.245821 | 0 | 0.130573 | 0.142077 | 1,464 | 42 | 119 | 34.857143 | 0.67914 | 0 | 0 | 0.147059 | 0 | 0 | 0.50888 | 0.493169 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088235 | true | 0 | 0 | 0.058824 | 0.147059 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
85bdfe99465860c0e5ef243c43925cbed19e1e66 | 250 | py | Python | Proyecto_Global_Hitss_RF_v1/Funciones/Obtener_hora_actual.py | marlonsale08/Marlon | 07570fb4aefd2427564e77c45a15e36e3fca3b19 | [
"MIT"
] | null | null | null | Proyecto_Global_Hitss_RF_v1/Funciones/Obtener_hora_actual.py | marlonsale08/Marlon | 07570fb4aefd2427564e77c45a15e36e3fca3b19 | [
"MIT"
] | null | null | null | Proyecto_Global_Hitss_RF_v1/Funciones/Obtener_hora_actual.py | marlonsale08/Marlon | 07570fb4aefd2427564e77c45a15e36e3fca3b19 | [
"MIT"
] | null | null | null | from datetime import datetime
def greetingTime(current_hour=None):
    """Return a Spanish greeting appropriate for the given hour of day.

    :param current_hour: hour in 0-23; defaults to the current local hour,
        so existing zero-argument callers keep their behavior.
    :return: "Buenos días" before 12, "Buenas tardes" from 12 through 17,
        "Buenas noches" otherwise.
    """
    if current_hour is None:
        current_hour = datetime.now().hour
    if current_hour < 12:
        return "Buenos días"
    elif 12 <= current_hour < 18:
        return "Buenas tardes"
    else:
        return "Buenas noches"
a418f30be67e74f37f6d483c86e7442d6adafeb7 | 225 | py | Python | examples/dir.py | dmytrostriletskyi/design-kit | 6b7cf8657ffe18cd6a43c6cfb73b071084f0331e | [
"MIT"
] | 107 | 2019-03-02T22:31:04.000Z | 2022-01-24T09:57:15.000Z | examples/dir.py | dmytrostriletskyi/accessify | 6b7cf8657ffe18cd6a43c6cfb73b071084f0331e | [
"MIT"
] | 161 | 2019-03-02T17:46:20.000Z | 2020-11-13T15:45:53.000Z | examples/dir.py | dmytrostriletskyi/design-kit | 6b7cf8657ffe18cd6a43c6cfb73b071084f0331e | [
"MIT"
] | 8 | 2019-03-04T13:01:40.000Z | 2022-03-31T06:40:46.000Z | from accessify import accessify, private
# accessify enforces the @private marker: the method is callable only from
# inside the class.
@accessify
class Car:
    @private
    def start_engine(self):
        return 'Engine sound.'

if __name__ == '__main__':
    car = Car()
    # Private methods are removed from the instance's public surface.
    assert 'start_engine' not in dir(car)
| 15 | 41 | 0.657778 | 28 | 225 | 4.928571 | 0.678571 | 0.15942 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.244444 | 225 | 14 | 42 | 16.071429 | 0.811765 | 0 | 0 | 0 | 0 | 0 | 0.146667 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 1 | 0.111111 | false | 0 | 0.111111 | 0.111111 | 0.444444 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
a41a3584d47653dd2b897f2654eeed6f9c1edc8d | 6,285 | py | Python | webDe/settings.py | dannybombastic/Cockfight | ea02132053a590ff407b53dee98ab0d22bfae0dd | [
"Apache-2.0"
] | null | null | null | webDe/settings.py | dannybombastic/Cockfight | ea02132053a590ff407b53dee98ab0d22bfae0dd | [
"Apache-2.0"
] | null | null | null | webDe/settings.py | dannybombastic/Cockfight | ea02132053a590ff407b53dee98ab0d22bfae0dd | [
"Apache-2.0"
] | null | null | null | """
Django settings for webDe project.
Generated by 'django-admin startproject' using Django 2.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
from django.urls import reverse_lazy
try:
    # Optional per-machine overrides; LocalSetting may not exist in every
    # checkout. Catch only ImportError -- the previous bare `except:` also
    # swallowed SystemExit/KeyboardInterrupt and masked real errors raised
    # inside LocalSetting itself.
    from .LocalSetting import *
    DEBUG = True
    ALLOWED_HOSTS = ['*']
except ImportError:
    # SECURITY WARNING: don't run with debug turned on in production!
    # NOTE(review): both branches currently enable DEBUG and allow all
    # hosts; tighten these before any production deployment.
    DEBUG = True
    ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from an environment variable before any production deployment.
SECRET_KEY =')z(66-zt@g3y_=mh(n(xs8!es%yi0f7aczob9m&m)xikyrx#*6'
# SECURITY WARNING: don't run with debug turned on in production!
#DEBUG = True
#ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'apps.accounts',
'apps.entradas',
#'social_django',
'taggit',
'sslserver',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'webDe.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'social_django.context_processors.backends',
'social_django.context_processors.login_redirect',
],
},
},
]
WSGI_APPLICATION = 'webDe.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
STATIC_ROOT = os.path.join(BASE_DIR, 'staticRoot')
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL ='/media/'
#Social Media Login
AUTHENTICATION_BACKENDS = (
# 'social_core.backends.amazon.AmazonOAuth2',
# # 'social_core.backends.bitbucket.BitbucketOAuth',
# 'social_core.backends.facebook.FacebookAppOAuth2',
# 'social_core.backends.facebook.FacebookOAuth2',
# 'social_core.backends.github.GithubOAuth2',
# 'social_core.backends.gitlab.GitLabOAuth2',
# 'social_core.backends.google.GoogleOAuth',
# 'social_core.backends.google.GoogleOAuth2',
# 'social_core.backends.google.GoogleOpenId',
# 'social_core.backends.google.GooglePlusAuth',
# 'social_core.backends.google_openidconnect.GoogleOpenIdConnect',
# 'social_core.backends.instagram.InstagramOAuth2',
# 'social_core.backends.linkedin.LinkedinOAuth',
# 'social_core.backends.linkedin.LinkedinOAuth2',
# 'social_core.backends.spotify.SpotifyOAuth2',
# 'social_core.backends.trello.TrelloOAuth',
# 'social_core.backends.tumblr.TumblrOAuth',
# 'social_core.backends.twitter.TwitterOAuth',
# 'social_core.backends.yahoo.YahooOAuth',
# 'social_core.backends.yahoo.YahooOpenId',
'django.contrib.auth.backends.ModelBackend',
)
#Para esto hay que tenerlo con https la redireccion
#SOCIAL_AUTH_FACEBOOK_KEY = FACEBOOK_KEY
#SOCIAL_AUTH_FACEBOOK_SECRET = FACEBOOK_SECRET
#para traer el email
#SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
#SOCIAL_AUTH_FACEBOOK_PROFILE_EXTRA_PARAMS = {
# 'fields': 'id,name,email',
#}
#SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = GOOGLE_KEY
#SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = GOOGLE_SECRET
#SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True
#SOCIAL_AUTH_PIPELINE = (
#revisar para implementar que nose repitan los email
# 'utilsSocialPipe.check_email_exists',
# 'social_core.pipeline.social_auth.social_details',
# 'social_core.pipeline.social_auth.social_uid',
# 'social_core.pipeline.social_auth.auth_allowed',
# 'social_core.pipeline.social_auth.social_user',
# 'social_core.pipeline.user.get_username',
# 'social_core.pipeline.mail.mail_validation',
# 'social_core.pipeline.user.create_user',
# 'social_core.pipeline.social_auth.associate_user',
# 'social_core.pipeline.debug.debug',
# 'social_core.pipeline.social_auth.load_extra_data',
# 'social_core.pipeline.user.user_details',
# 'social_core.pipeline.debug.debug'
#)
#Login opcions
#LOGIN_URL = '/login/'
LOGOUT_REDIRECT_URL = reverse_lazy('accounts:login')
LOGIN_REDIRECT_URL = reverse_lazy('entradas:index')
#SSL
SECURE_SSL_REDIRECT = True
CSRF_COOKIE_SECURE = True
#config file
FILE_UPLOAD_PERMISSIONS = 0o644
| 29.78673 | 91 | 0.724582 | 727 | 6,285 | 6.061898 | 0.339752 | 0.072612 | 0.081688 | 0.03971 | 0.217608 | 0.170865 | 0.093261 | 0.093261 | 0.059905 | 0.032675 | 0 | 0.008945 | 0.146221 | 6,285 | 210 | 92 | 29.928571 | 0.812337 | 0.513763 | 0 | 0.044944 | 1 | 0.011236 | 0.47379 | 0.386425 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.05618 | 0.033708 | 0 | 0.033708 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
a41ee16c11a22caabeb9c2d8e601622abe31911d | 285 | py | Python | data/groups.py | bloodes/adressbook | 52582bc8c4825987db668ab084dff32202f1e2e5 | [
"Apache-2.0"
] | null | null | null | data/groups.py | bloodes/adressbook | 52582bc8c4825987db668ab084dff32202f1e2e5 | [
"Apache-2.0"
] | null | null | null | data/groups.py | bloodes/adressbook | 52582bc8c4825987db668ab084dff32202f1e2e5 | [
"Apache-2.0"
] | null | null | null | from models.model_group import Group
# Fixture: one group with all-empty fields plus two fully-populated groups.
testdata = [Group(group_name='', group_header='', group_footer=''),
            Group(group_name='name1', group_header='name1', group_footer='name1'),
            Group(group_name='name2', group_header='name2', group_footer='name2')
            ]
a4231d9ebb1a9fdc4a81aec0b543a74474670f0f | 2,100 | py | Python | design_pattern/memento/implementation.py | AustinHellerRepo/DesignPatternDemo | 2409315eb7494ff738e64e1ca51206fb46083040 | [
"MIT"
] | null | null | null | design_pattern/memento/implementation.py | AustinHellerRepo/DesignPatternDemo | 2409315eb7494ff738e64e1ca51206fb46083040 | [
"MIT"
] | null | null | null | design_pattern/memento/implementation.py | AustinHellerRepo/DesignPatternDemo | 2409315eb7494ff738e64e1ca51206fb46083040 | [
"MIT"
] | null | null | null | from __future__ import annotations
from design_pattern.memento.framework import Originator, Memento, Caretaker
from typing import List
from abc import ABC, abstractmethod
class Implementor(ABC):
    """Facade over a GroceryList: every mutating call first pushes a
    snapshot onto a Caretaker history stack so it can be undone."""

    def __init__(self):
        self.__grocery_list = GroceryList(
            title="Untitled"
        )
        # Stack of GroceryListState mementos, most recent on top.
        self.__grocery_list_history = Caretaker()

    def _save_history(func):
        # Decorator defined in the class body: snapshots state before the
        # wrapped mutator runs. Because wrapper's code is textually inside
        # the class, self.__grocery_list mangles to the correct
        # _Implementor__grocery_list.
        def wrapper(self: Implementor, *args, **kwargs):
            _state = self.__grocery_list.get_state()
            self.__grocery_list_history.push(
                state=_state
            )
            return func(self, *args, **kwargs)
        return wrapper

    def undo(self):
        # Restore the most recent snapshot, if any exists.
        _is_successful, _grocery_list_state = self.__grocery_list_history.try_pop()
        if _is_successful:
            self.__grocery_list.set_state(
                state=_grocery_list_state
            )

    @_save_history
    def set_title(self, *, title: str):
        self.__grocery_list.set_title(
            title=title
        )

    @_save_history
    def add_item(self, *, item: str):
        self.__grocery_list.add_item(
            item=item
        )

    def get_title(self) -> str:
        return self.__grocery_list.get_title()

    def get_items(self) -> List[str]:
        # Copy so callers cannot mutate internal state.
        return self.__grocery_list.get_items().copy()

    @abstractmethod
    def show(self):
        """
        Provides an interface for the grocery list
        """
        raise NotImplementedError()
class GroceryList(Originator):
    """Memento originator: a titled list of grocery items whose full state
    can be captured into, and restored from, GroceryListState snapshots."""

    def __init__(self, *, title: str):
        self.__title = title
        self.__items = []  # type: List[str]

    def set_title(self, *, title: str):
        self.__title = title

    def get_title(self) -> str:
        return self.__title

    def add_item(self, *, item: str):
        self.__items.append(item)

    def get_items(self) -> List[str]:
        # Copy so callers cannot mutate the internal list.
        return self.__items.copy()

    def get_state(self) -> GroceryListState:
        """Snapshot the current title and items."""
        return GroceryListState(
            title=self.__title,
            items=self.__items.copy()
        )

    def set_state(self, *, state: GroceryListState):
        """Restore a previous snapshot.

        The item list is copied on restore; previously it was taken by
        reference, so a later add_item() silently mutated the memento's
        internal list as well.
        """
        self.__title = state.get_title()
        self.__items = list(state.get_items())
class GroceryListState(Memento):
    """Immutable snapshot of a GroceryList (title plus item list)."""

    def __init__(self, *, title: str, items: List[str]):
        self.__title = title
        # Keep a private copy so later mutation of the caller's list cannot
        # alter this snapshot.
        self.__items = list(items)

    def get_title(self) -> str:
        return self.__title

    def get_items(self) -> List[str]:
        # Return a copy to preserve the snapshot's integrity; previously the
        # internal list was handed out by reference.
        return self.__items.copy()
| 21 | 77 | 0.717143 | 279 | 2,100 | 4.978495 | 0.200717 | 0.102952 | 0.107991 | 0.047516 | 0.351332 | 0.293737 | 0.222462 | 0.12959 | 0.12959 | 0 | 0 | 0 | 0.164762 | 2,100 | 99 | 78 | 21.212121 | 0.791904 | 0.028095 | 0 | 0.246377 | 0 | 0 | 0.003943 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.275362 | false | 0 | 0.057971 | 0.101449 | 0.507246 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
a43b6bd99ed126f9facb51ef643c518482f41775 | 2,200 | py | Python | cookbook/c08/p18_mixin_classes.py | itpubs/python3-cookbook | 140f5e4cc0416b9674edca7f4c901b1f58fc1415 | [
"Apache-2.0"
] | 3 | 2018-09-19T06:44:13.000Z | 2019-03-24T10:07:07.000Z | cookbook/c08/p18_mixin_classes.py | itpubs/python3-cookbook | 140f5e4cc0416b9674edca7f4c901b1f58fc1415 | [
"Apache-2.0"
] | 2 | 2020-09-19T17:10:23.000Z | 2020-10-17T16:43:52.000Z | cookbook/c08/p18_mixin_classes.py | itpubs/python3-cookbook | 140f5e4cc0416b9674edca7f4c901b1f58fc1415 | [
"Apache-2.0"
] | 1 | 2020-12-22T06:33:18.000Z | 2020-12-22T06:33:18.000Z | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Topic: 混入类
Desc : 如果单独使用Minxin类没有任何意义,但是当利用多继承和其他类配合后就有神奇效果了。
Mixin也是多继承的主要用途。
"""
class LoggedMappingMixin:
    """Mixin that logs every get/set/delete access before delegating to the
    next class in the MRO. Intended for debugging mapping behaviour."""

    # Mixin classes carry no instance state of their own (instantiating a
    # mixin directly is meaningless), so no per-instance __dict__ is needed.
    __slots__ = ()

    def __getitem__(self, key):
        print('Getting ' + str(key))
        return super().__getitem__(key)

    def __setitem__(self, key, value):
        print('Setting {0} = {1!r}'.format(key, value))
        return super().__setitem__(key, value)

    def __delitem__(self, key):
        print('Deleting ' + str(key))
        return super().__delitem__(key)
class SetOnceMappingMixin:
    """Mixin that makes mapping keys write-once: assigning to an existing
    key raises KeyError."""

    __slots__ = ()  # mixins hold no per-instance state

    def __setitem__(self, key, value):
        if key not in self:
            return super().__setitem__(key, value)
        raise KeyError(str(key) + ' already set')
class StringKeysMappingMixin:
    """Mixin restricting mapping keys to str instances."""

    __slots__ = ()  # mixins hold no per-instance state

    def __setitem__(self, key, value):
        if isinstance(key, str):
            return super().__setitem__(key, value)
        raise TypeError('keys must be strings')
class LoggedDict(LoggedMappingMixin, dict):
    """dict with logged access, composed via multiple inheritance."""
    pass

# Demo: each operation below prints a log line before acting on the dict.
d = LoggedDict()
d['x'] = 23
print(d['x'])
del d['x']
from collections import defaultdict
class SetOnceDefaultDict(SetOnceMappingMixin, defaultdict):
    """defaultdict whose explicitly-set keys are write-once."""
    pass

d = SetOnceDefaultDict(list)
# Mutating the default-created list does not count as "setting" the key.
d['x'].append(2)
d['x'].append(3)
# d['x'] = 23          # KeyError: 'x already set'
def LoggedMapping(cls):
    """Second approach: a class decorator. Captures the original mapping
    methods of *cls*, wraps each with logging, and patches the wrappers
    back onto the class before returning it."""
    original_getitem = cls.__getitem__
    original_setitem = cls.__setitem__
    original_delitem = cls.__delitem__

    def __getitem__(self, key):
        print('Getting ' + str(key))
        return original_getitem(self, key)

    def __setitem__(self, key, value):
        print('Setting {} = {!r}'.format(key, value))
        return original_setitem(self, key, value)

    def __delitem__(self, key):
        print('Deleting ' + str(key))
        return original_delitem(self, key)

    cls.__getitem__ = __getitem__
    cls.__setitem__ = __setitem__
    cls.__delitem__ = __delitem__
    return cls
# Same logging behaviour as the mixin version, achieved with the decorator.
@LoggedMapping
class LoggedDict(dict):
    pass
| 21.568627 | 59 | 0.630909 | 246 | 2,200 | 5.211382 | 0.333333 | 0.060062 | 0.054602 | 0.074103 | 0.335413 | 0.326053 | 0.277691 | 0.232449 | 0.232449 | 0.168487 | 0 | 0.004182 | 0.239091 | 2,200 | 101 | 60 | 21.782178 | 0.761649 | 0.148636 | 0 | 0.418182 | 0 | 0 | 0.057979 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.163636 | false | 0.054545 | 0.018182 | 0 | 0.509091 | 0.127273 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
a4413159fd227ec62328bd4292601f85b5ced963 | 2,486 | py | Python | examples.py | JayakrishnanAjayakumar/pcml | 1ac37e95ef68c1661fd4a0b1e6f500a14f103ecb | [
"BSD-3-Clause"
] | 1 | 2018-03-07T20:35:15.000Z | 2018-03-07T20:35:15.000Z | examples.py | Jindam/HPCGISLab | 54ec030cd87b3f6f46ea68cdf007b21344517515 | [
"BSD-3-Clause"
] | null | null | null | examples.py | Jindam/HPCGISLab | 54ec030cd87b3f6f46ea68cdf007b21344517515 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
"""
Copyright (c) 2014 High-Performance Computing and GIS (HPCGIS) Laboratory. All rights reserved.
Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
Authors and contributors: Eric Shook (eshook@kent.edu); Zhengliang Feng (odayfans@gmail.com, zfeng2@kent.edu)
"""
from pcml import *
import os.path as path
import sys,getopt
import numpy as np
if __name__ == '__main__':
    # See if number of cores was provided as a command-line parameter
    try:
        opts,args=getopt.getopt(sys.argv[1:],"n:",["ncores"])
    except getopt.GetoptError:
        print "test.py -n <cores>"
        sys.exit(1)
    for opt, arg in opts:
        if opt in ("-n", "--num"):
            PCMLConfig.num_procs=int(arg)
    # Switch to serial evaluation if n is 1
    if PCMLConfig.num_procs == 1:
        PCMLConfig.exectype = ExecutorType.serialpython
    print "Number of cores used for spatial data processing: ",PCMLConfig.num_procs
    bb=BoundingBox(0,0,10,10)
    # Default data directory
    datadir="./data/"
    # Read 2 test layers
    layer1=ReadASCIIGrid(path.join(datadir,"datab.asc"))
    layer2=ReadASCIIGrid(path.join(datadir,"dataa.asc"))
    # Print out the layers
    print "layer1",layer1
    layer1.print_data()
    print "layer2",layer2
    layer2.print_data()
    print "\nTesting layer division"
    layero=layer1/layer2
    layero.print_data()
    # Dividing a layer by itself should yield a layer of ones.
    print "\nTesting layer division"
    layero=layer1/layer1
    layero.print_data()
    print "\nTesting layer addition and multiplication"
    layero=(layer1+layer1)*layer2
    print "layero",layero
    layero.print_data()
    # Subtracting a layer from itself should yield a layer of zeros.
    print "\nTesting layer subtraction layer1-layer1 "
    layero=layer1-layer1
    print "layero",layero
    layero.print_data()
    # LocalSum_np is the numpy-backed local-sum operation.
    layero=LocalSum_np(layer1,layer2)
    print "layero", layero
    layero.print_data()
    print "\nTesting FocalMean"
    layer1=FocalMean(layer1)
    layer1=LocalSin(layer1)
    layer3=FocalMean(layer1, buffersize=3)
    print "layer1 (FocalMean with buffer = 1)", layer1
    layer1.print_data()
    print "layer3 (FocalMean with buffer = 3)", layer3
    layer3.print_data()
    print "\nWriting layer1 and layer3 out as an ASCII grid file and GeoTIFF"
    print "Writing",datadir,"layer1.asc"
    WriteASCIIGrid(path.join(datadir,"layer1.asc"), layer1)
    print "Writing",datadir,"layer3.tif"
    WriteGeoTIFF(path.join(datadir,"layer3.tif"), layer3)
| 28.906977 | 109 | 0.678198 | 325 | 2,486 | 5.123077 | 0.436923 | 0.048649 | 0.067267 | 0.066066 | 0.201802 | 0.170571 | 0.124925 | 0.124925 | 0.068468 | 0.068468 | 0 | 0.030659 | 0.212792 | 2,486 | 85 | 110 | 29.247059 | 0.820133 | 0.072003 | 0 | 0.222222 | 0 | 0 | 0.243964 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.074074 | null | null | 0.481481 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
a468bc3f8c7872a6af3170d81c5d24594468a8d8 | 850 | py | Python | custom_imports/importer/simple_finder.py | madman-bob/python-custom-imports | e9d6979865bfde5f149a2190d8f2895d333ab219 | [
"MIT"
] | null | null | null | custom_imports/importer/simple_finder.py | madman-bob/python-custom-imports | e9d6979865bfde5f149a2190d8f2895d333ab219 | [
"MIT"
] | 1 | 2020-05-21T02:36:07.000Z | 2020-05-21T12:55:24.000Z | custom_imports/importer/simple_finder.py | madman-bob/python-custom-imports | e9d6979865bfde5f149a2190d8f2895d333ab219 | [
"MIT"
] | null | null | null | from dataclasses import dataclass, field
from types import ModuleType
from typing import Callable, Iterable, Optional, TypeVar
from custom_imports.importer.types import Finder
from custom_imports.utils import field_required
__all__ = ["SimpleFinder"]
LT = TypeVar("LT") # Locator type.
@dataclass(frozen=True)
class SimpleFinder(Finder[LT]):
"""
A basic Finder class.
SimpleFinder(
locate_module=func,
)
Finds a module locator by calling func(fullname, path, target).
"""
locate_module: Callable[
[str, Iterable[str], Optional[ModuleType]], Optional[LT]
] = field(default_factory=field_required)
def find_module_locator(
self, fullname: str, path: Iterable[str], target: Optional[ModuleType] = None
) -> Optional[LT]:
return self.locate_module(fullname, path, target)
| 25.757576 | 85 | 0.708235 | 100 | 850 | 5.88 | 0.45 | 0.061224 | 0.057823 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.191765 | 850 | 32 | 86 | 26.5625 | 0.855895 | 0.167059 | 0 | 0 | 0 | 0 | 0.02071 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.3125 | 0.0625 | 0.5625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
a48b8d8cba8d89e0d7d26e09e3e988db1dcbd798 | 5,090 | py | Python | services/base_image/fixed/fast_oci_object_storage_models__init__.py | samle-appsbroker/acquire | 34f29dc4173cb87f8e2f49864559b35efb48b21b | [
"Apache-2.0"
] | 21 | 2019-01-29T17:18:45.000Z | 2021-11-07T19:29:31.000Z | services/base_image/fixed/fast_oci_object_storage_models__init__.py | openghg/acquire | 8af8701b092f7304c02fea1ee6360e53502dfd64 | [
"Apache-2.0"
] | 229 | 2020-09-30T15:08:39.000Z | 2022-03-31T14:23:55.000Z | services/base_image/fixed/fast_oci_object_storage_models__init__.py | openghg/acquire | 8af8701b092f7304c02fea1ee6360e53502dfd64 | [
"Apache-2.0"
] | 4 | 2019-06-20T07:49:05.000Z | 2020-03-31T18:11:56.000Z | # coding: utf-8
# Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
from __future__ import absolute_import
import lazy_import as _lazy_import
Bucket = _lazy_import.lazy_class("oci.object_storage.models.bucket.Bucket")
BucketSummary = _lazy_import.lazy_class("oci.object_storage.models.bucket_summary.BucketSummary")
CommitMultipartUploadDetails = _lazy_import.lazy_class("oci.object_storage.models.commit_multipart_upload_details.CommitMultipartUploadDetails")
CommitMultipartUploadPartDetails = _lazy_import.lazy_class("oci.object_storage.models.commit_multipart_upload_part_details.CommitMultipartUploadPartDetails")
CopyObjectDetails = _lazy_import.lazy_class("oci.object_storage.models.copy_object_details.CopyObjectDetails")
CreateBucketDetails = _lazy_import.lazy_class("oci.object_storage.models.create_bucket_details.CreateBucketDetails")
CreateMultipartUploadDetails = _lazy_import.lazy_class("oci.object_storage.models.create_multipart_upload_details.CreateMultipartUploadDetails")
CreatePreauthenticatedRequestDetails = _lazy_import.lazy_class("oci.object_storage.models.create_preauthenticated_request_details.CreatePreauthenticatedRequestDetails")
ListObjects = _lazy_import.lazy_class("oci.object_storage.models.list_objects.ListObjects")
MultipartUpload = _lazy_import.lazy_class("oci.object_storage.models.multipart_upload.MultipartUpload")
MultipartUploadPartSummary = _lazy_import.lazy_class("oci.object_storage.models.multipart_upload_part_summary.MultipartUploadPartSummary")
NamespaceMetadata = _lazy_import.lazy_class("oci.object_storage.models.namespace_metadata.NamespaceMetadata")
ObjectLifecyclePolicy = _lazy_import.lazy_class("oci.object_storage.models.object_lifecycle_policy.ObjectLifecyclePolicy")
ObjectLifecycleRule = _lazy_import.lazy_class("oci.object_storage.models.object_lifecycle_rule.ObjectLifecycleRule")
ObjectNameFilter = _lazy_import.lazy_class("oci.object_storage.models.object_name_filter.ObjectNameFilter")
ObjectSummary = _lazy_import.lazy_class("oci.object_storage.models.object_summary.ObjectSummary")
PreauthenticatedRequest = _lazy_import.lazy_class("oci.object_storage.models.preauthenticated_request.PreauthenticatedRequest")
PreauthenticatedRequestSummary = _lazy_import.lazy_class("oci.object_storage.models.preauthenticated_request_summary.PreauthenticatedRequestSummary")
PutObjectLifecyclePolicyDetails = _lazy_import.lazy_class("oci.object_storage.models.put_object_lifecycle_policy_details.PutObjectLifecyclePolicyDetails")
RenameObjectDetails = _lazy_import.lazy_class("oci.object_storage.models.rename_object_details.RenameObjectDetails")
RestoreObjectsDetails = _lazy_import.lazy_class("oci.object_storage.models.restore_objects_details.RestoreObjectsDetails")
UpdateBucketDetails = _lazy_import.lazy_class("oci.object_storage.models.update_bucket_details.UpdateBucketDetails")
UpdateNamespaceMetadataDetails = _lazy_import.lazy_class("oci.object_storage.models.update_namespace_metadata_details.UpdateNamespaceMetadataDetails")
WorkRequest = _lazy_import.lazy_class("oci.object_storage.models.work_request.WorkRequest")
WorkRequestError = _lazy_import.lazy_class("oci.object_storage.models.work_request_error.WorkRequestError")
WorkRequestLogEntry = _lazy_import.lazy_class("oci.object_storage.models.work_request_log_entry.WorkRequestLogEntry")
WorkRequestResource = _lazy_import.lazy_class("oci.object_storage.models.work_request_resource.WorkRequestResource")
WorkRequestSummary = _lazy_import.lazy_class("oci.object_storage.models.work_request_summary.WorkRequestSummary")
# Maps type names to classes for object_storage services.
object_storage_type_mapping = {
"Bucket": Bucket,
"BucketSummary": BucketSummary,
"CommitMultipartUploadDetails": CommitMultipartUploadDetails,
"CommitMultipartUploadPartDetails": CommitMultipartUploadPartDetails,
"CopyObjectDetails": CopyObjectDetails,
"CreateBucketDetails": CreateBucketDetails,
"CreateMultipartUploadDetails": CreateMultipartUploadDetails,
"CreatePreauthenticatedRequestDetails": CreatePreauthenticatedRequestDetails,
"ListObjects": ListObjects,
"MultipartUpload": MultipartUpload,
"MultipartUploadPartSummary": MultipartUploadPartSummary,
"NamespaceMetadata": NamespaceMetadata,
"ObjectLifecyclePolicy": ObjectLifecyclePolicy,
"ObjectLifecycleRule": ObjectLifecycleRule,
"ObjectNameFilter": ObjectNameFilter,
"ObjectSummary": ObjectSummary,
"PreauthenticatedRequest": PreauthenticatedRequest,
"PreauthenticatedRequestSummary": PreauthenticatedRequestSummary,
"PutObjectLifecyclePolicyDetails": PutObjectLifecyclePolicyDetails,
"RenameObjectDetails": RenameObjectDetails,
"RestoreObjectsDetails": RestoreObjectsDetails,
"UpdateBucketDetails": UpdateBucketDetails,
"UpdateNamespaceMetadataDetails": UpdateNamespaceMetadataDetails,
"WorkRequest": WorkRequest,
"WorkRequestError": WorkRequestError,
"WorkRequestLogEntry": WorkRequestLogEntry,
"WorkRequestResource": WorkRequestResource,
"WorkRequestSummary": WorkRequestSummary
}
| 74.852941 | 168 | 0.862672 | 460 | 5,090 | 9.152174 | 0.195652 | 0.071259 | 0.093112 | 0.126366 | 0.333729 | 0.333729 | 0.333729 | 0.333729 | 0.275297 | 0.174822 | 0 | 0.001876 | 0.057367 | 5,090 | 67 | 169 | 75.970149 | 0.875573 | 0.028684 | 0 | 0 | 0 | 0 | 0.512551 | 0.458502 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
a4a4806c0f26942854d6736a2bed167e72d43f93 | 1,352 | py | Python | tests/regressions/issue_61/test_issue_61.py | bitranox/Arpeggio | 62151cb8ef2cfe5113a4388da09892e7714c5e96 | [
"MIT"
] | null | null | null | tests/regressions/issue_61/test_issue_61.py | bitranox/Arpeggio | 62151cb8ef2cfe5113a4388da09892e7714c5e96 | [
"MIT"
] | null | null | null | tests/regressions/issue_61/test_issue_61.py | bitranox/Arpeggio | 62151cb8ef2cfe5113a4388da09892e7714c5e96 | [
"MIT"
] | null | null | null | # stdlib
import pytest # type: ignore
# proj
from arpeggio import *
def test_ordered_choice_skipws_ws() -> None:
# Both rules will skip white-spaces
def sentence():
return Sequence(ZeroOrMore(word), skipws=True), EOF
def word():
return OrderedChoice([(id, ' ', '.'), id, '.'], skipws=True)
def id():
return 'id'
parser = ParserPython(sentence)
# Thus this parses without problem
# But the length is always 3 + EOF == 4
# First alternative of word rule never matches
tree = parser.parse("id id .")
assert len(tree) == 4
tree = parser.parse("id id.")
assert len(tree) == 4
tree = parser.parse("idid.")
assert len(tree) == 4
tree = parser.parse("idid .")
assert len(tree) == 4
# Now we change skipws flag
def sentence2():
return Sequence(ZeroOrMore(word2), skipws=True), EOF
def word2():
return OrderedChoice([(id, ' ', '.'), id, '.'], skipws=False)
parser = ParserPython(sentence2)
with pytest.raises(NoMatch):
# This can't parse anymore
parser.parse("id id .")
tree = parser.parse("idid.")
assert len(tree) == 4
# This is the case where 'id .' will be matched by the first alternative of
# word as there is no ws skipping
tree = parser.parse("idid .")
assert len(tree) == 3
| 25.509434 | 79 | 0.60429 | 175 | 1,352 | 4.645714 | 0.44 | 0.094711 | 0.110701 | 0.086101 | 0.313653 | 0.242312 | 0.242312 | 0.202952 | 0.162362 | 0.162362 | 0 | 0.012048 | 0.263314 | 1,352 | 52 | 80 | 26 | 0.804217 | 0.244822 | 0 | 0.37931 | 0 | 0 | 0.049554 | 0 | 0 | 0 | 0 | 0 | 0.206897 | 1 | 0.206897 | false | 0 | 0.068966 | 0.172414 | 0.448276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
8ef85708ab32d77d250dc4442a1b579f02e718b3 | 498 | py | Python | Tests/get_url_test.py | by09115/Flask-URLshortener | 860eef75d86658f91a9316c253b512bf3aad0a6c | [
"MIT"
] | 1 | 2019-01-02T08:50:07.000Z | 2019-01-02T08:50:07.000Z | Tests/get_url_test.py | by09115/Flask-URLshortener | 860eef75d86658f91a9316c253b512bf3aad0a6c | [
"MIT"
] | null | null | null | Tests/get_url_test.py | by09115/Flask-URLshortener | 860eef75d86658f91a9316c253b512bf3aad0a6c | [
"MIT"
] | null | null | null | from Tests import TestCaseBase, check_status_code
class GetUrlTest(TestCaseBase):
def setUp(self):
super(GetUrlTest, self).setUp()
self.short_url = self.save_url_request()
@check_status_code(302)
def test_success_get_url(self):
rv = self.get_url_request('b')
self.assertEqual(rv.handlers['location'], 'http://blog.jaehoon.kim')
return rv
@check_status_code(204)
def test_wrong_url(self):
return self.get_url_request('Pizza')
| 26.210526 | 76 | 0.680723 | 66 | 498 | 4.863636 | 0.515152 | 0.102804 | 0.140187 | 0.105919 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015152 | 0.204819 | 498 | 18 | 77 | 27.666667 | 0.795455 | 0 | 0 | 0 | 0 | 0 | 0.074297 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 1 | 0.230769 | false | 0 | 0.076923 | 0.076923 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
f10df601a7c5749970e3056bb9860eea4ebaf2e2 | 1,186 | py | Python | pyuwb/client_id_utils.py | Jiangshan00001/pyuwb | 9253c9f7481199eddac37dec544723fba31ba996 | [
"MIT"
] | 1 | 2022-03-24T03:08:46.000Z | 2022-03-24T03:08:46.000Z | pyuwb/client_id_utils.py | Jiangshan00001/pyuwb | 9253c9f7481199eddac37dec544723fba31ba996 | [
"MIT"
] | null | null | null | pyuwb/client_id_utils.py | Jiangshan00001/pyuwb | 9253c9f7481199eddac37dec544723fba31ba996 | [
"MIT"
] | null | null | null | __author__ = "songjiangshan"
__copyright__ = "Copyright (C) 2021 songjiangshan \n All Rights Reserved."
__license__ = ""
__version__ = "1.0"
DEVICE_TYPE_TAG=0 #OLD3
DEVICE_TYPE_ANCHOR=1 #OLD2
DEVICE_TYPE_ANCHORZ=2 #OLD1
def client_id_remove_group(client_id):
return str(client_id_get_type(client_id)) + '-' + str(client_id_get_no(client_id))
def client_id_get_no(client_id: str):
"""
输入字符串,返回对应的号
:param client_id:
:return: int型数值
"""
id = int(client_id.split('-')[-1])
return id
def client_id_get_type(client_id):
id = int(client_id.split('-')[-2])
return id
def client_id_get_group(client_id):
id = int(client_id.split('-')[0])
return id
def pack_client_id(group_id=0, type_int=DEVICE_TYPE_ANCHOR, no=0):
ret = str(group_id) + '-' + str(type_int) + '-' + str(no)
return ret
if __name__ == '__main__':
assert client_id_get_no('1-2-3') == 3
assert client_id_get_no('1-2-20') == 20
assert client_id_get_no('1-12-21') == 21
print(client_id_get_type('1-12-21'))
# assert client_id_get_type('1-2-3') == 2
# assert client_id_get_group('1-2-3') == 1
# assert client_id_get_group('2-2-3') == 2
| 23.72 | 86 | 0.667791 | 195 | 1,186 | 3.615385 | 0.246154 | 0.27234 | 0.187234 | 0.144681 | 0.459574 | 0.321986 | 0.133333 | 0 | 0 | 0 | 0 | 0.052254 | 0.177066 | 1,186 | 49 | 87 | 24.204082 | 0.670082 | 0.152614 | 0 | 0.115385 | 0 | 0 | 0.11373 | 0 | 0 | 0 | 0 | 0 | 0.115385 | 1 | 0.192308 | false | 0 | 0 | 0.038462 | 0.384615 | 0.038462 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f111e8f8ba25e9c2ff8287a1f8d7026ab95f0fc1 | 2,012 | py | Python | apitest/api_test/migrations/0012_auto_20200219_1756.py | willhuang1206/apitest | 4b41855710ba8f21788027da83a830f631e11f26 | [
"Apache-2.0"
] | null | null | null | apitest/api_test/migrations/0012_auto_20200219_1756.py | willhuang1206/apitest | 4b41855710ba8f21788027da83a830f631e11f26 | [
"Apache-2.0"
] | 3 | 2020-06-06T01:57:41.000Z | 2021-06-10T22:57:58.000Z | apitest/api_test/migrations/0012_auto_20200219_1756.py | willhuang1206/apitest | 4b41855710ba8f21788027da83a830f631e11f26 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.0.2 on 2020-02-19 17:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('api_test', '0011_apiautomationcoverage_num'),
]
operations = [
migrations.AlterField(
model_name='apiautomationcoverage',
name='num',
field=models.IntegerField(blank=True, default=0, null=True, verbose_name='关联数量'),
),
migrations.AlterField(
model_name='apiautomationcoverage',
name='project',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='coverage_project', to='api_test.Project', verbose_name='项目'),
),
migrations.AlterField(
model_name='apiparameter',
name='value',
field=models.TextField(blank=True, null=True, verbose_name='参数值'),
),
migrations.AlterField(
model_name='apiresponse',
name='value',
field=models.TextField(blank=True, null=True, verbose_name='参数值'),
),
migrations.AlterField(
model_name='automation',
name='params',
field=models.TextField(blank=True, null=True, verbose_name='参数'),
),
migrations.AlterField(
model_name='automationresult',
name='automation',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='result_automation', to='api_test.Automation', verbose_name='用例'),
),
migrations.AlterField(
model_name='automationstep',
name='params',
field=models.TextField(blank=True, null=True, verbose_name='参数'),
),
migrations.AlterField(
model_name='automationtask',
name='params',
field=models.TextField(blank=True, null=True, verbose_name='参数'),
),
]
| 36.581818 | 183 | 0.60835 | 203 | 2,012 | 5.891626 | 0.29064 | 0.133779 | 0.167224 | 0.19398 | 0.551839 | 0.551839 | 0.461538 | 0.461538 | 0.461538 | 0.461538 | 0 | 0.013541 | 0.265905 | 2,012 | 54 | 184 | 37.259259 | 0.796209 | 0.022366 | 0 | 0.583333 | 1 | 0 | 0.149109 | 0.036641 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.041667 | 0 | 0.104167 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f113bc3867136bab6f6070ecf2922c70504f996e | 574 | py | Python | submission/counter_reactionary.py | leomaurodesenv/rock-paper-scissors-kaggle | c9690378757481b2ab1bce0911a50101e1bdacde | [
"CC0-1.0"
] | null | null | null | submission/counter_reactionary.py | leomaurodesenv/rock-paper-scissors-kaggle | c9690378757481b2ab1bce0911a50101e1bdacde | [
"CC0-1.0"
] | null | null | null | submission/counter_reactionary.py | leomaurodesenv/rock-paper-scissors-kaggle | c9690378757481b2ab1bce0911a50101e1bdacde | [
"CC0-1.0"
] | null | null | null |
import random
from kaggle_environments.envs.rps.utils import get_score
last_counter_action = None
def counter_reactionary(observation, configuration):
global last_counter_action
if observation.step == 0:
last_counter_action = random.randrange(0, configuration.signs)
elif get_score(last_counter_action, observation.lastOpponentAction) == 1:
last_counter_action = (last_counter_action + 2) % configuration.signs
else:
last_counter_action = (observation.lastOpponentAction + 1) % configuration.signs
return last_counter_action
| 31.888889 | 88 | 0.770035 | 67 | 574 | 6.298507 | 0.447761 | 0.208531 | 0.322275 | 0.090047 | 0.300948 | 0.222749 | 0 | 0 | 0 | 0 | 0 | 0.010395 | 0.162021 | 574 | 17 | 89 | 33.764706 | 0.866944 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.166667 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f118ab6fe117fde800f9a346ebf9d9692538566f | 3,492 | py | Python | django_cradmin/demo/project/test/settings.py | appressoas/django_cradmin | 0f8715afdfe1ad32e46033f442e622aecf6a4dec | [
"BSD-3-Clause"
] | 11 | 2015-07-05T16:57:58.000Z | 2020-11-24T16:58:19.000Z | django_cradmin/demo/project/test/settings.py | appressoas/django_cradmin | 0f8715afdfe1ad32e46033f442e622aecf6a4dec | [
"BSD-3-Clause"
] | 91 | 2015-01-08T22:38:13.000Z | 2022-02-10T10:25:27.000Z | django_cradmin/demo/project/test/settings.py | appressoas/django_cradmin | 0f8715afdfe1ad32e46033f442e622aecf6a4dec | [
"BSD-3-Clause"
] | 3 | 2016-12-07T12:19:24.000Z | 2018-10-03T14:04:18.000Z | """
Django settings for running the django_cradmin tests.
"""
from django_dbdev.backends.sqlite import DBSETTINGS
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y%j0x%=7a^sf53m*s^5nbmfe0_t13d*oibfx#m#*wz1x+k6+m1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_cradmin',
'django_cradmin.django_cradmin_testapp',
'django_cradmin.apps.cradmin_generic_token_with_metadata',
'django_cradmin.apps.cradmin_authenticate',
'django_cradmin.apps.cradmin_resetpassword',
'django_cradmin.apps.cradmin_activate_account',
'django_cradmin.apps.cradmin_register_account',
'django_cradmin.apps.cradmin_invite',
'django_cradmin.apps.cradmin_email',
'django_cradmin.apps.django_cradmin_js',
'django_cradmin.uicontainer',
'django_cradmin.tests.test_sortable.cradmin_sortable_testapp',
'django_cradmin.tests.test_viewhelpers.cradmin_viewhelpers_testapp',
'django_cradmin.apps.cradmin_authenticate.tests.cradmin_authenticate_testapp',
'django_cradmin.apps.cradmin_register_account.tests.cradmin_register_account_testapp',
'django_cradmin.apps.cradmin_email.tests.cradmin_email_testapp',
)
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
# insert your TEMPLATE_DIRS here
],
'APP_DIRS': True,
'OPTIONS': {
'debug': True,
'context_processors': [
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
"django.template.context_processors.request",
"django_cradmin.context_processors.cradmin",
],
},
},
]
# ROOT_URLCONF = 'django_cradmin.demo.project.urls'
# We do not set a name -- the test framework does that.
DATABASES = {
'default': DBSETTINGS
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
MEDIA_ROOT = 'test_django_media_root'
STATIC_ROOT = 'test_django_static_root'
ROOT_URLCONF = 'django_cradmin.demo.project.test.urls'
DJANGO_CRADMIN_SITENAME = 'Testsite'
DJANGO_CRADMIN_REGISTER_ACCOUNT_FORM_CLASS = \
'django_cradmin.apps.cradmin_register_account.forms.auth_user.AuthUserCreateAccountForm'
DJANGO_CRADMIN_INCLUDE_TEST_CSS_CLASSES = True
| 32.333333 | 92 | 0.725086 | 385 | 3,492 | 6.311688 | 0.376623 | 0.139095 | 0.083951 | 0.108642 | 0.188477 | 0.100823 | 0.023868 | 0 | 0 | 0 | 0 | 0.007893 | 0.165521 | 3,492 | 107 | 93 | 32.635514 | 0.826012 | 0.146048 | 0 | 0.027027 | 0 | 0.013514 | 0.640809 | 0.595953 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.013514 | 0.013514 | 0 | 0.013514 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f11be192d1ddd50cdda688a06bb1a1af21e7e165 | 428 | py | Python | mnist/configs/resnet18.py | anisayari/pipeline | 48313bc5c459fde0d3fc0acd9f78ccfb677a5197 | [
"MIT"
] | 1 | 2019-02-14T13:59:03.000Z | 2019-02-14T13:59:03.000Z | mnist/configs/resnet18.py | anisayari/pipeline | 48313bc5c459fde0d3fc0acd9f78ccfb677a5197 | [
"MIT"
] | null | null | null | mnist/configs/resnet18.py | anisayari/pipeline | 48313bc5c459fde0d3fc0acd9f78ccfb677a5197 | [
"MIT"
] | null | null | null | from .base import ConfigMNISTBase
from pipeline.models.classification import ClassificationModuleLinear
from pipeline.models.image_classification import Resnet18Model
import torch.nn as nn
class Config(ConfigMNISTBase):
def __init__(self):
model = nn.Sequential(
Resnet18Model(),
ClassificationModuleLinear(Resnet18Model.NUM_FEATURES, 10)
)
super().__init__(model=model)
| 25.176471 | 70 | 0.733645 | 41 | 428 | 7.414634 | 0.585366 | 0.078947 | 0.118421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023324 | 0.198598 | 428 | 16 | 71 | 26.75 | 0.862974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.363636 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
f139fb71cd890e05932e05089ee6bc01628c1c14 | 1,890 | py | Python | core/logger.py | nragon/keeper | e7e668a89799759009dacb1e33bae07b3bcea40a | [
"MIT"
] | 28 | 2018-11-29T10:33:35.000Z | 2022-01-18T10:36:08.000Z | core/logger.py | nragon/keeper | e7e668a89799759009dacb1e33bae07b3bcea40a | [
"MIT"
] | 3 | 2019-04-26T14:13:29.000Z | 2020-05-22T20:12:01.000Z | core/logger.py | nragon/keeper | e7e668a89799759009dacb1e33bae07b3bcea40a | [
"MIT"
] | 9 | 2019-01-08T04:20:46.000Z | 2022-01-03T05:14:48.000Z | # -*- coding: utf-8 -*-
"""
Provides base logging functions
:copyright: © 2018 by Nuno Gonçalves
:license: MIT, see LICENSE for more details.
"""
from logging import getLevelName, INFO, WARN, ERROR, DEBUG
from multiprocessing import current_process
from time import strftime
from core.common import load_config
from core.constants import TIME_FORMAT
class Logger(object):
"""
logger class that prints to stdout
"""
def __init__(self):
"""
partially initializes format
"""
self.format = "%s " + current_process().name + "-keeper[%s]: %s"
self.is_debug = bool(load_config()["debug"])
def info(self, message):
"""
prints an info message
:param message: message
"""
self._log(INFO, message)
def warning(self, message):
"""
prints a warning message
:param message: message
"""
self._log(WARN, message)
def error(self, message):
"""
prints an error message
:param message: message
"""
self._log(ERROR, message)
def debug(self, message, *args):
"""
prints a debug message
:param message: message
:param args: arguments
"""
self.log(DEBUG, message, *args)
def log(self, level, message, *args):
"""
prints a message with args
:param level: log level
:param message: message
:param args: arguments
"""
if level != DEBUG or self.is_debug:
if args:
message = message % args
self._log(level, message)
def _log(self, level, message):
"""
prints a message
:param level: log level
:param message: message
"""
print(self.format % (strftime(TIME_FORMAT), getLevelName(level), message))
| 22.771084 | 82 | 0.566667 | 206 | 1,890 | 5.121359 | 0.325243 | 0.079621 | 0.108057 | 0.098578 | 0.25782 | 0.216114 | 0.070142 | 0 | 0 | 0 | 0 | 0.003931 | 0.326984 | 1,890 | 82 | 83 | 23.04878 | 0.824686 | 0.305291 | 0 | 0 | 0 | 0 | 0.022179 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.291667 | false | 0 | 0.208333 | 0 | 0.541667 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
f13a4cc826ea37913e60b843ea7611564973e259 | 149 | py | Python | terms_finder/__init__.py | elderdk/pyxliff | ae641f0e93790498798f64ce1b64b16e9334cf75 | [
"MIT"
] | 2 | 2021-12-10T17:16:14.000Z | 2022-02-28T12:37:21.000Z | terms_finder/__init__.py | elderdk/pyxliff | ae641f0e93790498798f64ce1b64b16e9334cf75 | [
"MIT"
] | null | null | null | terms_finder/__init__.py | elderdk/pyxliff | ae641f0e93790498798f64ce1b64b16e9334cf75 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# pyxliff/__init__.py
"""Provides useful functions for SDLXliff terms verification and discovery."""
__version__ = "0.1.0"
| 21.285714 | 78 | 0.697987 | 19 | 149 | 5.052632 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.03125 | 0.14094 | 149 | 6 | 79 | 24.833333 | 0.71875 | 0.771812 | 0 | 0 | 0 | 0 | 0.185185 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f140a85804be9f86cc527818513ef5706f31e72f | 2,251 | py | Python | codewar/4 By 4 Skyscrapers -2k/4 By 4 Skyscrapers.py | z7211979/practise-Python | fb3c72b5e094b4102c9cce0513f48fca6ef4d8bc | [
"MIT"
] | 1 | 2018-08-30T23:11:47.000Z | 2018-08-30T23:11:47.000Z | codewar/4 By 4 Skyscrapers -2k/4 By 4 Skyscrapers.py | z7211979/practise-Python | fb3c72b5e094b4102c9cce0513f48fca6ef4d8bc | [
"MIT"
] | null | null | null | codewar/4 By 4 Skyscrapers -2k/4 By 4 Skyscrapers.py | z7211979/practise-Python | fb3c72b5e094b4102c9cce0513f48fca6ef4d8bc | [
"MIT"
] | null | null | null | #!/usr/bin/python3
# -*- coding: utf-8 -*-
'''
not worked
'''
import sys
sys.path.append('../')
import cw as test
import time
N=4
def rotate(matrix):
"""
:contrarotate matrix.
"""
return list(map(list,zip(*matrix[::])))[::-1]
def visible_onerow(array):
"""
:return visible number in one row.
"""
out_temp = 1
len_t=len(array)
for i in range(len_t - 1):
for j in range(i+1,len_t):
if array[i] < array[j]:
break
else:
pass
else :
out_temp += 1
return out_temp
def visible_cover(array):
out_temp = []
for i in range(len(array)):
out_temp.append(visible_onerow(array[i]))
return out_temp
def visible_allcover(array):
out_temp = []
temp =array
for i in range(N):
out_temp.extend(visible_cover(temp))
temp=rotate(temp)
out_temp = out_temp[-N:] + out_temp[:-N]
return out_temp
def clues_compare(list1,original):
for i in range(len(list1)):
if original[i]:
if original[i] == list1[i] :
pass
else :
return False
else :
continue
else:
return True
def solve_puzzle(clues):
temp =[0,0,0,0]*4
for i in range(N):
for j in range(N):
for x in range(1,N+1):
return visible_allcover(clues)
#return ( (1, 2, 3, 4), (2, 3, 4, 1), (3, 4, 1, 2), (4, 1, 2, 3) )
if __name__ == "__main__":
t0 = time.perf_counter()
clues = ((2, 2, 1, 3, 2, 2, 3, 1, 1, 2, 2, 3, 3, 2, 1, 3),
(0, 0, 1, 2, 0, 2, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0))
outcomes = (((1, 3, 4, 2), (4, 2, 1, 3), (3, 4, 2, 1), (2, 1, 3, 4)),
((2, 1, 4, 3), (3, 4, 1, 2), (4, 2, 3, 1), (1, 3, 2, 4)))
test.describe("4 by 4 skyscrapers")
test.it("should pass all the tests provided")
print(clues_compare(solve_puzzle(outcomes[0]),clues[0]))
# test.assert_equals(solve_puzzle(clues[0]), outcomes[0])
# test.assert_equals(solve_puzzle(clues[1]), outcomes[1])
print(time.perf_counter() - t0)
'''
def snail(array):
return list(array[0]) + snail(zip(*array[1:])[::-1]) if array else []
'''
| 21.438095 | 73 | 0.513994 | 345 | 2,251 | 3.24058 | 0.234783 | 0.075134 | 0.026834 | 0.049195 | 0.177102 | 0.059034 | 0.059034 | 0 | 0 | 0 | 0 | 0.071845 | 0.313638 | 2,251 | 104 | 74 | 21.644231 | 0.65178 | 0.095957 | 0 | 0.237288 | 0 | 0 | 0.034596 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.050847 | 0.050847 | null | null | 0.033898 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
f1466265e389573bb78149ea31304ec7d422874a | 22,156 | py | Python | applications/cpfecys/controllers/default.py | SIMBitGt-Company/RepoRepo | b84c39d9643a0c5e51291d5e62e36b6080a7e622 | [
"BSD-3-Clause"
] | null | null | null | applications/cpfecys/controllers/default.py | SIMBitGt-Company/RepoRepo | b84c39d9643a0c5e51291d5e62e36b6080a7e622 | [
"BSD-3-Clause"
] | null | null | null | applications/cpfecys/controllers/default.py | SIMBitGt-Company/RepoRepo | b84c39d9643a0c5e51291d5e62e36b6080a7e622 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## This is a sample controller
## - index is the default action of any application
## - user is required for authentication and authorization
## - download is for downloading files uploaded in the db (does streaming)
## - call exposes all registered services (none by default)
#########################################################################
def events():
    """Public listing of events for a semester.

    Uses the current year/period by default; an explicit
    ``request.vars['period']`` (a ``period_year`` id) overrides it.
    Returns the semester's year/name plus the assigned public events,
    ordered by project name.
    """
    requested_period = request.vars['period'] or False
    semester = cpfecys.current_year_period()
    if requested_period:
        semester = db(db.period_year.id == int(requested_period)).select().first()
    # Only events tied to an active project assignation are shown.
    events_rows = db((db.public_event.semester == semester.id) &
                     (db.public_event.assignation != None) &
                     (db.public_event.assignation == db.user_project.id) &
                     (db.user_project.project == db.project.id)
                     ).select(orderby=db.project.name)
    return dict(year=semester.yearp,
                semester=semester.period.name,
                thing=events_rows)
@auth.requires_login()
def event_editor():
    """CRUD grid over the public events of one of the logged-in user's
    project assignations.

    Expects ``request.vars['assignation']`` (a ``user_project`` id).
    Redirects to the index page when that assignation does not belong
    to the current user or is not active in the current year/period.
    """
    assignation = request.vars['assignation']
    #check assignation belongs to this user
    import cpfecys
    # Valid only when owned by this user AND the assignation's period
    # range [period, period + periods) covers the current year/period.
    check = db((db.user_project.assigned_user==auth.user.id)&\
        (db.user_project.id == assignation)&\
        ((db.user_project.period <= cpfecys.current_year_period().id) & \
        ((db.user_project.period + db.user_project.periods) > cpfecys.current_year_period().id))).select(db.user_project.ALL).first()
    if (check is None):
        redirect(URL('default','index'))
    cyearperiod = cpfecys.current_year_period()
    # Pin semester/assignation to the current period and this user's
    # assignation, and hide both fields so the grid cannot change them.
    db.public_event.semester.default = cyearperiod.id
    db.public_event.semester.writable = False
    db.public_event.semester.readable = False
    db.public_event.assignation.default = check.id
    db.public_event.assignation.writable = False
    db.public_event.assignation.readable = False
    db.public_event_schedule.public_event.readable = False
    db.public_event_schedule.public_event.writable = False
    # Constrain the smartgrid to events of this assignation only.
    query = (db.public_event.assignation == check.id)
    return dict(year = cyearperiod.yearp, semester = cyearperiod.period.name,name = check.project.name,grid = SQLFORM.smartgrid(db.public_event, constraints = {'public_event' : query}))
def index():
"""
example action using the internationalization operator T and flash
rendered by views/default/index.html or views/generic.html
if you need a simple wiki simple replace the two lines below with:
return auth.wiki()
"""
if auth.user != None:
groups = db((db.auth_membership.user_id==auth.user.id)& \
(db.auth_group.id==db.auth_membership.group_id)). \
select(db.auth_group.ALL)
front_notification = \
db(db.front_notification.is_public==True).select()| \
db((db.front_notification.id== \
db.notification_access.front_notification)& \
(db.notification_access.user_role.belongs(groups))
).select(db.front_notification.ALL)
else:
front_notification = db(db.front_notification.is_public == True).select()
return dict(front_notification=front_notification,
markmin_settings = cpfecys.get_markmin,)
def links():
""" This url shows all important links published by admin
user.
"""
links = []
if auth.user != None:
links = db(db.link).select()
groups = db((db.auth_membership.user_id==auth.user.id)& \
(db.auth_group.id==db.auth_membership.group_id)). \
select(db.auth_group.ALL)
links = db((db.link.id == db.link_access.link)& \
(db.link_access.user_role.belongs(groups))).select(db.link.ALL)
public_links = db(db.link.is_public == True).select()
return dict(links=links, public_links=public_links)
def files():
""" This url shows all published files published by admin"""
if auth.user != None:
groups = db((db.auth_membership.user_id==auth.user.id)&\
(db.auth_group.id==db.auth_membership.group_id)).\
select(db.auth_group.ALL)
files = db((db.uploaded_file.id == db.file_access.uploaded_file)&\
(db.file_access.user_role.belongs(groups)))\
.select(db.uploaded_file.ALL)
else:
files = db(db.uploaded_file.is_public == True).select()
return dict(files=files)
def user():
"""
exposes:
http://..../[app]/default/user/login
http://..../[app]/default/user/logout
http://..../[app]/default/user/register
http://..../[app]/default/user/profile
http://..../[app]/default/user/retrieve_password
http://..../[app]/default/user/change_password
http://..../[app]/default/user/manage_users (requires membership in
use @auth.requires_login()
@auth.requires_membership('group name')
@auth.requires_permission('read','table name',record_id)
to decorate functions that need access control
"""
if request.args(0) == 'profile':
if ((auth.has_membership('Super-Administrator') == False) & (auth.has_membership('Teacher') == False) & (auth.has_membership('Ecys-Administrator') == False)):
db.auth_user.first_name.writable = False
db.auth_user.last_name.writable = False
db.auth_user.username.writable = False
db.auth_user.email.writable = False
import cpfecys
currentyear_period = cpfecys.current_year_period()
for date_var in db((db.student_control_period.period_name==T(str(currentyear_period.period.name))+" "+str(currentyear_period.yearp))).select():
var_date_finish = date_var.date_finish
from datetime import datetime
if datetime.now() > date_var.date_start and datetime.now() < var_date_finish:
db.auth_user.email.writable = True
pass
pass
db.auth_user.photo.writable = True
review = db((db.photo_review.user_id == auth.user.id)).select().first()
if review is not None:
if review.accepted == True:
db.auth_user.photo.writable = False
pass
pass
pass
if (auth.has_membership('Teacher')):
if str(request.vars["edit_foto"]) == "True":
db.auth_user.photo.writable = True
else:
db.auth_user.photo.writable = False
db.auth_user.username.writable = False
pass
return dict(form=auth())
#CERODAS 1: Function to validated if the user has update data or not
def GetUserUpdateData():
if request.vars['Username'] != '':
row = db(db.auth_user.username==request.vars['Username']).select().first()
if row != None:
#obtengo el grupo del usuario: si estudiante aplica sino no aplica metodo
group_id_Student = db(db.auth_group.role == 'Student').select().first()
group_id_RegularStudent = db(db.auth_group.role == "Academic").select().first()
#si no hay informacion de los grupos se devuelve -1
if group_id_Student is None:
group_id_Student = -1
if group_id_RegularStudent is None:
group_id_RegularStudent = -1
#se verifica si el usuario pertenece a algun grupo de estudiantes
Student = db((row.id==db.auth_membership.user_id) & ((db.auth_membership.group_id==group_id_Student) | (db.auth_membership.group_id==group_id_RegularStudent))).select().first()
if Student is None:
is_Student = False
else:
is_Student = True
#si es estudiante se aplica el nuevo metodo de lo contrario si no es estudiante se pasa por el procedimiento normal
if is_Student:
UserInfo = row.data_updated
if UserInfo is None:
session.username = request.vars['Username'];
redirect(URL('first_request_password'))
else:
session.username = request.vars['Username'];
session.flash = ('Usuario verificado correctamente, puede proceder a solicitar su password via email.')
redirect(URL('user', args=('request_reset_password'), vars=dict(message='UPDATED')))
else:
session.username = request.vars['Username'];
session.flash = ('Usuario verificado correctamente, puede proceder a solicitar su password via email.')
redirect(URL('user', args=('request_reset_password'), vars=dict(message='UPDATED')))
else:
session.flash = T('Invalid username')
redirect(URL('user', args=('request_reset_password'), vars=dict(message='NOT_RESULT')))
else:
session.flash = T('Username') + ' ' + T('Cannot be empty')
redirect(URL('user', args=('request_reset_password'), vars=dict(message='NOT_DATA')))
#CERODAS 1: Copy from WEBSERVICE check_user from check_user in controller student_academic
#Changes: Not valid user logon
def check_student(check_carnet):
svp=db(db.validate_student).select().first()
if svp is not None:
try:
#CONSUME THE WEBSERVICE
from gluon.contrib.pysimplesoap.client import SoapClient
from gluon.contrib.pysimplesoap.client import SimpleXMLElement
client = SoapClient(
location = svp.supplier,
action = svp.supplier+"/"+svp.action_service,
namespace = svp.supplier,
soap_ns=svp.type_service, trace = True, ns = False)
import cpfecys
year = cpfecys.current_year_period()
sent="<"+svp.send+">"
for svpf in db(db.validate_student_parameters).select():
sent +="<"+svpf.parameter_name_validate+">"+svpf.parameter_value_validate+"</"+svpf.parameter_name_validate+">"
sent += "<CARNET>"+str(check_carnet)+"</CARNET><CICLO>"+str(year.yearp)+"</CICLO></"+svp.send+">"
back = client.call(svp.action_service,xmlDatos=sent)
#PREPARE FOR RETURNED XML WEB SERVICE
xml = back.as_xml()
xml=xml.replace('<','<')
xml=xml.replace('>','>')
inicio = xml.find("<"+svp.receive+">")
final = xml.find("</"+svp.receive+">")
xml = xml[inicio:(final+17)]
import xml.etree.ElementTree as ET
root = ET.fromstring(xml)
xml = SimpleXMLElement(xml)
#VARIABLE TO CHECK THE CORRECT FUNCTIONING
CARNET = xml.CARNET
NOMBRES = xml.NOMBRES
APELLIDOS= xml.APELLIDOS
CORREO = xml.CORREO
#Unicode Nombres
try:
str(NOMBRES)
except:
apellidos_var = unicode(NOMBRES).split(' ')
appellidos_return = None
for apellido in apellidos_var:
try:
if appellidos_return is None:
appellidos_return = str(apellido)
else:
appellidos_return = appellidos_return + " " + str(apellido)
except:
try:
temp = unicode(apellido).encode('utf-8').replace('á','á').replace('é','é').replace('Ã','í').replace('ó','ó').replace('ú','ú').replace('ñ','ñ').replace('Ã','Á').replace('É','É').replace('Ã','Í').replace('Ó','Ó').replace('Ú','Ú').replace('Ñ','Ñ').replace('ü‘','ü')
except:
None
apellido = temp
if appellidos_return is None:
appellidos_return = str(apellido)
else:
appellidos_return = appellidos_return + " " + str(apellido)
NOMBRES = appellidos_return
#Unicode APELLIDOS
try:
str(APELLIDOS)
except:
apellidos_var = unicode(APELLIDOS).split(' ')
appellidos_return = None
for apellido in apellidos_var:
try:
if appellidos_return is None:
appellidos_return = str(apellido)
else:
appellidos_return = appellidos_return + " " + str(apellido)
except:
try:
temp = unicode(apellido).encode('utf-8').replace('á','á').replace('é','é').replace('Ã','í').replace('ó','ó').replace('ú','ú').replace('ñ','ñ').replace('Ã','Á').replace('É','É').replace('Ã','Í').replace('Ó','Ó').replace('Ú','Ú').replace('Ñ','Ñ').replace('ü‘','ü')
except:
None
apellido = temp
if appellidos_return is None:
appellidos_return = str(apellido)
else:
appellidos_return = appellidos_return + " " + str(apellido)
APELLIDOS = appellidos_return
if (CARNET is None or CARNET=='') and (NOMBRES is None or NOMBRES=='') and (APELLIDOS is None or APELLIDOS=='') and (CORREO is None or CORREO==''):
return dict(flag=False,error=False,message=T('The record was removed because the user is not registered to the academic cycle'))
else:
isStuden=False
for c in root.findall('CARRERA'):
if c.find('UNIDAD').text=="08" and c.find('EXTENSION').text=="00" and (c.find('CARRERA').text=="05" or c.find('CARRERA').text=="09" or c.find('CARRERA').text=="07"):
isStuden=True
if isStuden==False:
return dict(flag=False,error=False,message=T('The record was removed because students not enrolled in career allowed to use the system'))
else:
return dict(flag=True, carnet=int(str(CARNET)), nombres=(NOMBRES), apellidos=(APELLIDOS), correo=str(CORREO),error=False)
except:
return dict(flag=False,error=True,message=T('Error with web service validation'))
else:
return dict(flag=False,error=True,message=T('Error with web service validation'))
#CERODAS 1: new method from password recovery
def first_request_password():
Mensaje = ''
web_service = check_student(session.username)
if web_service['flag'] == True:
Nombres = web_service['nombres']
if Nombres is None:
Nombres = ''
Apellidos = web_service['apellidos']
if Apellidos is None:
Apellidos = ''
Email = web_service['correo']
#Email = 'Carlos8_r@hotmail.com'
if Email is None:
Email = ''
Carnet = web_service['carnet']
if Carnet is None:
Carnet = ''
FullName = str(Nombres) + " " + str(Apellidos)
if Email == '':
MessageHead = "Lastimosamente no existe informacion de tu email en nuestras bases de datos, por lo que debes actualizar tu informacion en Registro y Estadistica."
else:
#Obtengo la informacion del usuario
Usua = db(db.auth_user.username==session.username).select().first()
#Seteo el valor del Email obtenido por el webservices
Usua.email = Email
#Mando el email para recuperacion
auth.email_reset_password(Usua)
#Construyo el mensaje
MessageHead = "Te hemos enviado un email a "
MessageMiddle = str(Email)
MessageFinal = " , el cual está registrado en el sistema de Registro y Estadística. Si esta cuenta ya no está activa procede a solicitar tu cambio de correo en Registro y Estadística."
return dict(FullName = FullName, MessageHead = MessageHead, MessageMiddle = MessageMiddle,MessageFinal=MessageFinal )
else:
session.flash = 'Upss!!! Ocurrio un problema con el sistema de Registro y Estadística, intenta nuevamente.'
redirect(URL('user', args=('request_reset_password')))
pass
@cache.action()
def download():
"""
allows downloading of uploaded files
http://..../[app]/default/download/[filename]
"""
return response.download(request, db)
def download_file():
the_file = db(db.uploaded_file.file_data==request.args[0]).select().first()
if the_file != None and the_file.visible == True and the_file.is_public == True:
return response.download(request, db)
else:
session.flash = T('Access Forbidden')
redirect(URL('default', 'index'))
def call():
"""
exposes services. for example:
http://..../[app]/default/call/jsonrpc
decorate with @services.jsonrpc the functions to expose
supports xml, json, xmlrpc, jsonrpc, amfrpc, rss, csv
"""
return service()
@auth.requires_signature()
def data():
"""
http://..../[app]/default/data/tables
http://..../[app]/default/data/create/[table]
http://..../[app]/default/data/read/[table]/[id]
http://..../[app]/default/data/update/[table]/[id]
http://..../[app]/default/data/delete/[table]/[id]
http://..../[app]/default/data/select/[table]
http://..../[app]/default/data/search/[table]
but URLs must be signed, i.e. linked with
A('table',_href=URL('data/tables',user_signature=True))
or with the signed load operator
LOAD('default','data.load',args='tables',ajax=True,user_signature=True)
"""
return dict(form=crud())
def zip():
files = ['item.uploaded_file.bd4592bbb798c7c6.3235363035372e706466.pdf']
return response.zip(request, files, db)
def resources():
#Get the selected item_restriction id from parameter
item_restriction_id = request.vars['r']
#Get the items that belong to current semester
import cpfecys
#emarquez: por default, sigue igual, si envia parametro d periodo, se cambia
parameter_period = request.vars['period'] or False
period = cpfecys.current_year_period()
#emarquez
if parameter_period:
period = db(db.period_year.id == parameter_period).select().first()
def teachers_on_project(project_id):
import cpfecys
#period = cpfecys.current_year_period()
if cpfecys.is_semestre(period.id):
return db((db.project.id == project_id)&\
(db.user_project.project == db.project.id)&\
(db.auth_user.id == db.user_project.assigned_user)&\
((db.user_project.period <= period.id) & ((db.user_project.period + db.user_project.periods) > period.id))&\
(db.auth_membership.user_id == db.auth_user.id)&\
(db.auth_membership.group_id == db.auth_group.id)&\
(db.auth_group.role == 'Teacher')).select()
else:
return db((db.project.id == project_id)&\
(db.user_project.project == db.project.id)&\
(db.auth_user.id == db.user_project.assigned_user)&\
(db.user_project.period == period.id) &\
(db.auth_membership.user_id == db.auth_user.id)&\
(db.auth_membership.group_id == db.auth_group.id)&\
(db.auth_group.role == 'Teacher')).select()
def aux_in_courses(project_id):
import cpfecys
#period = cpfecys.current_year_period()
rowsi = db((db.period.id==db.period_detail.period)&(db.period_year.period==db.period.id)).select()
lst = []
for r in rowsi:
lst.append(r.period_year.id)
return db((db.project.id == project_id)&\
(db.user_project.project == db.project.id)&\
(db.auth_user.id == db.user_project.assigned_user)&\
((~db.user_project.period.belongs(lst))&(db.user_project.period <= period.id) & ((db.user_project.period + db.user_project.periods) > period.id))&\
(db.auth_membership.user_id == db.auth_user.id)&\
(db.auth_membership.group_id == db.auth_group.id)&\
(db.auth_group.role == 'Student')).select()
return dict(teachers_on_project = teachers_on_project,
aux_in_courses = aux_in_courses,
semester = period,
data = db((db.item.created == period)&
(db.item.item_restriction == item_restriction_id)&
(db.item.item_restriction == db.item_restriction.id)&
(db.item_restriction.is_public == True)&
((db.item_restriction.period == period)|(db.item_restriction.permanent == True))&
(db.item.assignation == db.user_project.id)&
(db.user_project.project == db.project.id)&
(db.user_project.project == db.project.id)&\
(db.auth_user.id == db.user_project.assigned_user)&\
(db.user_project.assignation_status == None)&\
(db.auth_membership.user_id == db.auth_user.id)&\
(db.auth_membership.group_id == db.auth_group.id)&\
(db.auth_group.role == 'Student')&
(db.item.id > 0)).select(orderby=db.project.name),
period =parameter_period)
def about_us():
return dict(message="about us")
def notification():
notification = request.vars['notification']
front_notification = db((db.front_notification.is_public == True)&\
(db.front_notification.id == notification)).select().first()
return dict(front_notification=front_notification,markmin_settings = cpfecys.get_markmin)
| 46.644211 | 302 | 0.586162 | 2,561 | 22,156 | 4.939086 | 0.178055 | 0.017076 | 0.019606 | 0.016602 | 0.430864 | 0.362163 | 0.315203 | 0.299549 | 0.283105 | 0.261602 | 0 | 0.003455 | 0.281414 | 22,156 | 474 | 303 | 46.742616 | 0.7899 | 0.067837 | 0 | 0.362018 | 0 | 0.005935 | 0.086049 | 0.010307 | 0 | 0 | 0 | 0.00211 | 0 | 0 | null | null | 0.050445 | 0.029674 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
f15f139fa9354be5d310a537e18612012d1f77d1 | 1,924 | py | Python | qa327_test/frontend/test_logout_page.py | jacob-seiler/seetgeek | 07affac0b06b8670911e348fc9d078bdf5242dc0 | [
"MIT"
] | null | null | null | qa327_test/frontend/test_logout_page.py | jacob-seiler/seetgeek | 07affac0b06b8670911e348fc9d078bdf5242dc0 | [
"MIT"
] | null | null | null | qa327_test/frontend/test_logout_page.py | jacob-seiler/seetgeek | 07affac0b06b8670911e348fc9d078bdf5242dc0 | [
"MIT"
] | 1 | 2021-01-02T19:44:53.000Z | 2021-01-02T19:44:53.000Z | import pytest
from seleniumbase import BaseCase
from qa327_test.conftest import base_url
from unittest.mock import patch
from qa327.models import User
from werkzeug.security import generate_password_hash, check_password_hash
"""
This file defines all unit tests for the logout page.
The tests will only test the frontend portion of the program, by patching the backend to return
specfic values. For example:
@patch('qa327.backend.get_user', return_value=test_user)
Will patch the backend get_user function (within the scope of the current test case)
so that it return 'test_user' instance below rather than reading
the user from the database.
Annotate @patch before unit tests can mock backend methods (for that testing function)
"""
# Moch a sample user
test_user = User(
email='tester0@gmail.com',
name='Tester Zero',
password='Password123',
balance=5000
)
class FrontEndLogoutPageTest(BaseCase):
def test_logout_invalidates(self, *_):
"""
Logout will invalidate the current session and redirect to the login page.
Test case ID: R7.1.1
"""
# Log in user using #email and #password
self.open(base_url + '/login')
self.type("#email", test_user.email)
self.type("#password", test_user.password)
self.click("#btn-submit")
# Open /logout
self.open(base_url + "/logout")
# Validate that current page contains #login-title
self.assert_element("#login-title")
def test_logout_restricts(self, *_):
"""
After logout, the user shouldn't be able to access restricted pages.
Test case ID: R7.1.2
"""
# Log out user (to invalidate any logged-in sessions that may exist)
self.open(base_url + "/logout")
# Open /
self.open(base_url)
# Validate that current page contains #login-title
self.assert_element("#login-title")
| 32.066667 | 95 | 0.690229 | 265 | 1,924 | 4.913208 | 0.456604 | 0.026882 | 0.036866 | 0.046083 | 0.156682 | 0.104455 | 0.104455 | 0.104455 | 0.104455 | 0.104455 | 0 | 0.015416 | 0.224532 | 1,924 | 59 | 96 | 32.610169 | 0.857239 | 0.220894 | 0 | 0.166667 | 1 | 0 | 0.119256 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 1 | 0.083333 | false | 0.125 | 0.25 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
f1782944c639b14d71ac08800edccf0760178af8 | 186 | py | Python | src/x_mlps/_version.py | PyriteAI/x-mlps | fcbba196384f16b742fccb8e82bd2adc467b1ef2 | [
"MIT"
] | 4 | 2021-12-29T03:04:58.000Z | 2022-01-13T13:07:43.000Z | src/x_mlps/_version.py | PyriteAI/x-mlps | fcbba196384f16b742fccb8e82bd2adc467b1ef2 | [
"MIT"
] | 1 | 2022-01-24T00:06:46.000Z | 2022-01-24T00:06:46.000Z | src/x_mlps/_version.py | PyriteAI/x-mlps | fcbba196384f16b742fccb8e82bd2adc467b1ef2 | [
"MIT"
] | 1 | 2022-02-24T10:17:17.000Z | 2022-02-24T10:17:17.000Z | from importlib.metadata import PackageNotFoundError, version
try:
__version__ = version("x_mlps")
except PackageNotFoundError:
# package is not installed
__version__ = None
| 23.25 | 60 | 0.768817 | 19 | 186 | 7.052632 | 0.789474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.172043 | 186 | 7 | 61 | 26.571429 | 0.87013 | 0.129032 | 0 | 0 | 0 | 0 | 0.0375 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
f1797974af0b1c517090e44934a9250630e99a4d | 324 | py | Python | company/signals/handlers.py | OnGridSystems/RobotVeraWebApp | 01cee658a3983fcaf128b40bb99c1a4272e90c07 | [
"MIT"
] | 11 | 2018-06-13T10:10:11.000Z | 2021-06-05T08:23:43.000Z | company/signals/handlers.py | OnGridSystems/RobotVeraWebApp | 01cee658a3983fcaf128b40bb99c1a4272e90c07 | [
"MIT"
] | 5 | 2020-06-05T18:24:25.000Z | 2022-03-11T23:21:01.000Z | company/signals/handlers.py | OnGridSystems/RobotVeraWebApp | 01cee658a3983fcaf128b40bb99c1a4272e90c07 | [
"MIT"
] | 5 | 2018-08-17T16:09:33.000Z | 2021-06-06T05:32:10.000Z | from django.db.models.signals import post_save
from django.dispatch import receiver
from company.models import Company
from company.tasks import deploy_new_company
@receiver(post_save, sender=Company)
def company_created(sender, instance, created, **kwargs):
if created:
deploy_new_company.delay(instance.id)
| 27 | 57 | 0.799383 | 45 | 324 | 5.6 | 0.488889 | 0.079365 | 0.126984 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126543 | 324 | 11 | 58 | 29.454545 | 0.890459 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.5 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
f18dc483d7521c03921392d10535b3718b96278d | 25,767 | py | Python | pysnmp/ALCATEL-IND1-IPX-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/ALCATEL-IND1-IPX-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/ALCATEL-IND1-IPX-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module ALCATEL-IND1-IPX-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ALCATEL-IND1-IPX-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:02:31 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
routingIND1Ipx, = mibBuilder.importSymbols("ALCATEL-IND1-BASE", "routingIND1Ipx")
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
ModuleIdentity, TimeTicks, Integer32, iso, Counter32, NotificationType, IpAddress, MibIdentifier, Unsigned32, ObjectIdentity, Bits, Counter64, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "TimeTicks", "Integer32", "iso", "Counter32", "NotificationType", "IpAddress", "MibIdentifier", "Unsigned32", "ObjectIdentity", "Bits", "Counter64", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
RowStatus, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "DisplayString", "TextualConvention")
# Module identity for the Alcatel IPX MIB, anchored under the routingIND1Ipx
# branch (OID 1.3.6.1.4.1.6486.801.1.2.1.10.8.1) imported from ALCATEL-IND1-BASE.
alcatelIND1IPXMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1))
alcatelIND1IPXMIB.setRevisions(('2007-04-03 00:00',))
# Descriptive text is attached only when the MIB builder was configured to load
# texts (saves memory when descriptions are not needed).
if mibBuilder.loadTexts: alcatelIND1IPXMIB.setLastUpdated('200704030000Z')
if mibBuilder.loadTexts: alcatelIND1IPXMIB.setOrganization('Alcatel-Lucent')
# Root node for all objects defined by this module (the .1 child of the module OID).
alcatelIND1IPXMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1))
class NetNumber(TextualConvention, OctetString):
    """Textual convention for an IPX network number.

    Represented as a fixed-length OCTET STRING of exactly 4 octets
    (the size constraint pins both lower and upper bound to 4).
    """
    status = 'current'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(4, 4)
    fixedLength = 4
class HostAddress(TextualConvention, OctetString):
    """Fixed-length 6-octet OCTET STRING textual convention (IPX host/node
    address, per the size constraint below)."""
    status = 'current'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(6, 6)
    fixedLength = 6
alaIpxRoutingGroup = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1))
if mibBuilder.loadTexts: alaIpxRoutingGroup.setStatus('current')
alaIpxFilterGroup = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2))
if mibBuilder.loadTexts: alaIpxFilterGroup.setStatus('current')
alaIpxTimerGroup = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 3))
if mibBuilder.loadTexts: alaIpxTimerGroup.setStatus('current')
alaIpxStaticRouteTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 1), )
if mibBuilder.loadTexts: alaIpxStaticRouteTable.setStatus('current')
alaIpxStaticRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 1, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaIpxStaticRouteNetNum"))
if mibBuilder.loadTexts: alaIpxStaticRouteEntry.setStatus('current')
alaIpxStaticRouteNetNum = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 1, 1, 1), NetNumber().clone(hexValue="00000000"))
if mibBuilder.loadTexts: alaIpxStaticRouteNetNum.setStatus('current')
alaIpxStaticRouteNextHopNet = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 1, 1, 2), NetNumber().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxStaticRouteNextHopNet.setStatus('current')
alaIpxStaticRouteNextHopNode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 1, 1, 3), HostAddress().clone(hexValue="000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxStaticRouteNextHopNode.setStatus('current')
alaIpxStaticRouteTicks = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxStaticRouteTicks.setStatus('current')
alaIpxStaticRouteHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxStaticRouteHopCount.setStatus('current')
alaIpxStaticRouteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 1, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxStaticRouteRowStatus.setStatus('current')
alaIpxDefRouteTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 2), )
if mibBuilder.loadTexts: alaIpxDefRouteTable.setStatus('current')
alaIpxDefRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 2, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaIpxDefRouteVlanId"))
if mibBuilder.loadTexts: alaIpxDefRouteEntry.setStatus('current')
alaIpxDefRouteVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094)))
if mibBuilder.loadTexts: alaIpxDefRouteVlanId.setStatus('current')
alaIpxDefRouteNet = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 2, 1, 2), NetNumber().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxDefRouteNet.setStatus('current')
alaIpxDefRouteNode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 2, 1, 3), HostAddress().clone(hexValue="000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxDefRouteNode.setStatus('current')
alaIpxDefRouteRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 2, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxDefRouteRowStatus.setStatus('current')
alaIpxExtMsgTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 3), )
if mibBuilder.loadTexts: alaIpxExtMsgTable.setStatus('current')
alaIpxExtMsgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 3, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaIpxExtMsgVlanId"))
if mibBuilder.loadTexts: alaIpxExtMsgEntry.setStatus('current')
alaIpxExtMsgVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094)))
if mibBuilder.loadTexts: alaIpxExtMsgVlanId.setStatus('current')
alaIpxExtMsgMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxExtMsgMode.setStatus('current')
alaIpxExtMsgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 3, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxExtMsgRowStatus.setStatus('current')
alaIpxFlush = MibScalar((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("rip", 1), ("sap", 2), ("both", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaIpxFlush.setStatus('current')
alaIpxRipSapFilterTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1), )
if mibBuilder.loadTexts: alaIpxRipSapFilterTable.setStatus('current')
alaIpxRipSapFilterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterVlanId"), (0, "ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterType"), (0, "ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterNet"), (0, "ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterNetMask"), (0, "ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterNode"), (0, "ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterNodeMask"), (0, "ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterSvcType"))
if mibBuilder.loadTexts: alaIpxRipSapFilterEntry.setStatus('current')
alaIpxRipSapFilterVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094)))
if mibBuilder.loadTexts: alaIpxRipSapFilterVlanId.setStatus('current')
alaIpxRipSapFilterType = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("sapOutput", 1), ("sapInput", 2), ("gnsOutput", 3), ("ripOutput", 4), ("ripInput", 5))).clone(1))
if mibBuilder.loadTexts: alaIpxRipSapFilterType.setStatus('current')
alaIpxRipSapFilterNet = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 3), NetNumber().clone(hexValue="00000000"))
if mibBuilder.loadTexts: alaIpxRipSapFilterNet.setStatus('current')
alaIpxRipSapFilterNetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 4), NetNumber().clone(hexValue="ffffffff"))
if mibBuilder.loadTexts: alaIpxRipSapFilterNetMask.setStatus('current')
alaIpxRipSapFilterNode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 5), HostAddress().clone(hexValue="000000000000"))
if mibBuilder.loadTexts: alaIpxRipSapFilterNode.setStatus('current')
alaIpxRipSapFilterNodeMask = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 6), HostAddress().clone(hexValue="ffffffffffff"))
if mibBuilder.loadTexts: alaIpxRipSapFilterNodeMask.setStatus('current')
alaIpxRipSapFilterSvcType = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(65535))
if mibBuilder.loadTexts: alaIpxRipSapFilterSvcType.setStatus('current')
alaIpxRipSapFilterMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("allow", 1), ("block", 2))).clone('allow')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxRipSapFilterMode.setStatus('current')
alaIpxRipSapFilterRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 1, 1, 10), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxRipSapFilterRowStatus.setStatus('current')
alaIpxWatchdogSpoofTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 2), )
if mibBuilder.loadTexts: alaIpxWatchdogSpoofTable.setStatus('current')
alaIpxWatchdogSpoofEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 2, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaIpxWatchdogSpoofVlanId"))
if mibBuilder.loadTexts: alaIpxWatchdogSpoofEntry.setStatus('current')
alaIpxWatchdogSpoofVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094)))
if mibBuilder.loadTexts: alaIpxWatchdogSpoofVlanId.setStatus('current')
alaIpxWatchdogSpoofMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxWatchdogSpoofMode.setStatus('current')
alaIpxWatchdogSpoofRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 2, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxWatchdogSpoofRowStatus.setStatus('current')
alaIpxSerialFilterTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 3), )
if mibBuilder.loadTexts: alaIpxSerialFilterTable.setStatus('current')
alaIpxSerialFilterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 3, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaIpxSerialFilterVlanId"))
if mibBuilder.loadTexts: alaIpxSerialFilterEntry.setStatus('current')
alaIpxSerialFilterVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094)))
if mibBuilder.loadTexts: alaIpxSerialFilterVlanId.setStatus('current')
alaIpxSerialFilterMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxSerialFilterMode.setStatus('current')
alaIpxSerialFilterRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 3, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxSerialFilterRowStatus.setStatus('current')
alaSpxKeepaliveSpoofTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 4), )
if mibBuilder.loadTexts: alaSpxKeepaliveSpoofTable.setStatus('current')
alaSpxKeepaliveSpoofEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 4, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaSpxKeepaliveSpoofVlanId"))
if mibBuilder.loadTexts: alaSpxKeepaliveSpoofEntry.setStatus('current')
alaSpxKeepaliveSpoofVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094)))
if mibBuilder.loadTexts: alaSpxKeepaliveSpoofVlanId.setStatus('current')
alaSpxKeepaliveSpoofMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaSpxKeepaliveSpoofMode.setStatus('current')
alaSpxKeepaliveSpoofRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 4, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaSpxKeepaliveSpoofRowStatus.setStatus('current')
alaIpxType20Table = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 5), )
if mibBuilder.loadTexts: alaIpxType20Table.setStatus('current')
alaIpxType20Entry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 5, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaIpxType20VlanId"))
if mibBuilder.loadTexts: alaIpxType20Entry.setStatus('current')
alaIpxType20VlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094)))
if mibBuilder.loadTexts: alaIpxType20VlanId.setStatus('current')
alaIpxType20Mode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2))).clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxType20Mode.setStatus('current')
alaIpxType20RowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 2, 5, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxType20RowStatus.setStatus('current')
alaIpxTimerTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 3, 1), )
if mibBuilder.loadTexts: alaIpxTimerTable.setStatus('current')
alaIpxTimerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 3, 1, 1), ).setIndexNames((0, "ALCATEL-IND1-IPX-MIB", "alaIpxTimerVlanId"))
if mibBuilder.loadTexts: alaIpxTimerEntry.setStatus('current')
alaIpxTimerVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094)))
if mibBuilder.loadTexts: alaIpxTimerVlanId.setStatus('current')
alaIpxTimerSap = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 180)).clone(60)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxTimerSap.setStatus('current')
alaIpxTimerRip = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 180)).clone(60)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxTimerRip.setStatus('current')
alaIpxTimerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 1, 3, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: alaIpxTimerRowStatus.setStatus('current')
alcatelIND1IPXMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2))
alcatelIND1IPXMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 1))
alcatelIND1IPXMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2))
alcatelIND1IPXMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 1, 1)).setObjects(("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBStaticRouteGroup"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBDefRouteGroup"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBExtMsgGroup"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBFlushGroup"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBRipSapFilterGroup"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBWatchdogSpoofGroup"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBSerialFilterGroup"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBKeepaliveSpoofGroup"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBType20Group"), ("ALCATEL-IND1-IPX-MIB", "alcatelIND1IPXMIBTimerGroup"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBCompliance = alcatelIND1IPXMIBCompliance.setStatus('current')
alcatelIND1IPXMIBStaticRouteGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 1)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxStaticRouteNextHopNet"), ("ALCATEL-IND1-IPX-MIB", "alaIpxStaticRouteNextHopNode"), ("ALCATEL-IND1-IPX-MIB", "alaIpxStaticRouteTicks"), ("ALCATEL-IND1-IPX-MIB", "alaIpxStaticRouteHopCount"), ("ALCATEL-IND1-IPX-MIB", "alaIpxStaticRouteRowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBStaticRouteGroup = alcatelIND1IPXMIBStaticRouteGroup.setStatus('current')
alcatelIND1IPXMIBDefRouteGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 2)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxDefRouteNet"), ("ALCATEL-IND1-IPX-MIB", "alaIpxDefRouteNode"), ("ALCATEL-IND1-IPX-MIB", "alaIpxDefRouteRowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBDefRouteGroup = alcatelIND1IPXMIBDefRouteGroup.setStatus('current')
alcatelIND1IPXMIBExtMsgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 3)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxExtMsgMode"), ("ALCATEL-IND1-IPX-MIB", "alaIpxExtMsgRowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBExtMsgGroup = alcatelIND1IPXMIBExtMsgGroup.setStatus('current')
alcatelIND1IPXMIBFlushGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 4)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxFlush"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBFlushGroup = alcatelIND1IPXMIBFlushGroup.setStatus('current')
alcatelIND1IPXMIBRipSapFilterGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 5)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterMode"), ("ALCATEL-IND1-IPX-MIB", "alaIpxRipSapFilterRowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBRipSapFilterGroup = alcatelIND1IPXMIBRipSapFilterGroup.setStatus('current')
alcatelIND1IPXMIBWatchdogSpoofGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 6)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxWatchdogSpoofMode"), ("ALCATEL-IND1-IPX-MIB", "alaIpxWatchdogSpoofRowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBWatchdogSpoofGroup = alcatelIND1IPXMIBWatchdogSpoofGroup.setStatus('current')
alcatelIND1IPXMIBSerialFilterGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 7)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxSerialFilterMode"), ("ALCATEL-IND1-IPX-MIB", "alaIpxSerialFilterRowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBSerialFilterGroup = alcatelIND1IPXMIBSerialFilterGroup.setStatus('current')
alcatelIND1IPXMIBKeepaliveSpoofGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 8)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaSpxKeepaliveSpoofMode"), ("ALCATEL-IND1-IPX-MIB", "alaSpxKeepaliveSpoofRowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBKeepaliveSpoofGroup = alcatelIND1IPXMIBKeepaliveSpoofGroup.setStatus('current')
alcatelIND1IPXMIBType20Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 9)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxType20Mode"), ("ALCATEL-IND1-IPX-MIB", "alaIpxType20RowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBType20Group = alcatelIND1IPXMIBType20Group.setStatus('current')
alcatelIND1IPXMIBTimerGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 10, 8, 1, 2, 2, 10)).setObjects(("ALCATEL-IND1-IPX-MIB", "alaIpxTimerRip"), ("ALCATEL-IND1-IPX-MIB", "alaIpxTimerSap"), ("ALCATEL-IND1-IPX-MIB", "alaIpxTimerRowStatus"))
# setStatus() only on pysnmp builds newer than 4.4.0 (probed via mibBuilder.version).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    alcatelIND1IPXMIBTimerGroup = alcatelIND1IPXMIBTimerGroup.setStatus('current')
mibBuilder.exportSymbols("ALCATEL-IND1-IPX-MIB", alcatelIND1IPXMIBTimerGroup=alcatelIND1IPXMIBTimerGroup, alaIpxRipSapFilterNetMask=alaIpxRipSapFilterNetMask, alcatelIND1IPXMIBConformance=alcatelIND1IPXMIBConformance, alcatelIND1IPXMIBExtMsgGroup=alcatelIND1IPXMIBExtMsgGroup, alcatelIND1IPXMIBCompliances=alcatelIND1IPXMIBCompliances, HostAddress=HostAddress, alaSpxKeepaliveSpoofMode=alaSpxKeepaliveSpoofMode, alaIpxExtMsgEntry=alaIpxExtMsgEntry, alaIpxDefRouteNet=alaIpxDefRouteNet, alaIpxStaticRouteNetNum=alaIpxStaticRouteNetNum, alaIpxType20Table=alaIpxType20Table, alaIpxStaticRouteNextHopNet=alaIpxStaticRouteNextHopNet, alaIpxDefRouteRowStatus=alaIpxDefRouteRowStatus, PYSNMP_MODULE_ID=alcatelIND1IPXMIB, alaSpxKeepaliveSpoofRowStatus=alaSpxKeepaliveSpoofRowStatus, alaIpxStaticRouteTicks=alaIpxStaticRouteTicks, alaIpxTimerVlanId=alaIpxTimerVlanId, alcatelIND1IPXMIBWatchdogSpoofGroup=alcatelIND1IPXMIBWatchdogSpoofGroup, alaIpxTimerRowStatus=alaIpxTimerRowStatus, alaIpxWatchdogSpoofTable=alaIpxWatchdogSpoofTable, alaSpxKeepaliveSpoofVlanId=alaSpxKeepaliveSpoofVlanId, alaIpxRipSapFilterRowStatus=alaIpxRipSapFilterRowStatus, alcatelIND1IPXMIBSerialFilterGroup=alcatelIND1IPXMIBSerialFilterGroup, alcatelIND1IPXMIBStaticRouteGroup=alcatelIND1IPXMIBStaticRouteGroup, alaIpxRipSapFilterNodeMask=alaIpxRipSapFilterNodeMask, alaIpxTimerRip=alaIpxTimerRip, alaIpxTimerSap=alaIpxTimerSap, alaIpxRipSapFilterVlanId=alaIpxRipSapFilterVlanId, alaIpxRipSapFilterNode=alaIpxRipSapFilterNode, alaIpxSerialFilterVlanId=alaIpxSerialFilterVlanId, alaIpxRoutingGroup=alaIpxRoutingGroup, alaIpxStaticRouteHopCount=alaIpxStaticRouteHopCount, alcatelIND1IPXMIBType20Group=alcatelIND1IPXMIBType20Group, alaIpxTimerTable=alaIpxTimerTable, alaIpxType20VlanId=alaIpxType20VlanId, alaIpxWatchdogSpoofEntry=alaIpxWatchdogSpoofEntry, alaIpxFilterGroup=alaIpxFilterGroup, alaIpxRipSapFilterTable=alaIpxRipSapFilterTable, alaIpxRipSapFilterMode=alaIpxRipSapFilterMode, 
alcatelIND1IPXMIBObjects=alcatelIND1IPXMIBObjects, alaIpxSerialFilterTable=alaIpxSerialFilterTable, alaIpxDefRouteTable=alaIpxDefRouteTable, alaIpxType20Mode=alaIpxType20Mode, alaIpxRipSapFilterNet=alaIpxRipSapFilterNet, alaIpxType20RowStatus=alaIpxType20RowStatus, alaIpxTimerGroup=alaIpxTimerGroup, alaIpxStaticRouteRowStatus=alaIpxStaticRouteRowStatus, alaIpxSerialFilterEntry=alaIpxSerialFilterEntry, alaIpxDefRouteNode=alaIpxDefRouteNode, alcatelIND1IPXMIBFlushGroup=alcatelIND1IPXMIBFlushGroup, alaIpxRipSapFilterSvcType=alaIpxRipSapFilterSvcType, NetNumber=NetNumber, alaIpxExtMsgTable=alaIpxExtMsgTable, alcatelIND1IPXMIBGroups=alcatelIND1IPXMIBGroups, alaSpxKeepaliveSpoofEntry=alaSpxKeepaliveSpoofEntry, alaIpxExtMsgRowStatus=alaIpxExtMsgRowStatus, alaIpxTimerEntry=alaIpxTimerEntry, alaIpxStaticRouteNextHopNode=alaIpxStaticRouteNextHopNode, alcatelIND1IPXMIBDefRouteGroup=alcatelIND1IPXMIBDefRouteGroup, alcatelIND1IPXMIBCompliance=alcatelIND1IPXMIBCompliance, alaIpxStaticRouteEntry=alaIpxStaticRouteEntry, alaIpxSerialFilterMode=alaIpxSerialFilterMode, alaSpxKeepaliveSpoofTable=alaSpxKeepaliveSpoofTable, alaIpxDefRouteVlanId=alaIpxDefRouteVlanId, alaIpxRipSapFilterEntry=alaIpxRipSapFilterEntry, alaIpxWatchdogSpoofRowStatus=alaIpxWatchdogSpoofRowStatus, alaIpxExtMsgMode=alaIpxExtMsgMode, alaIpxSerialFilterRowStatus=alaIpxSerialFilterRowStatus, alcatelIND1IPXMIB=alcatelIND1IPXMIB, alaIpxRipSapFilterType=alaIpxRipSapFilterType, alaIpxDefRouteEntry=alaIpxDefRouteEntry, alaIpxStaticRouteTable=alaIpxStaticRouteTable, alcatelIND1IPXMIBKeepaliveSpoofGroup=alcatelIND1IPXMIBKeepaliveSpoofGroup, alaIpxWatchdogSpoofMode=alaIpxWatchdogSpoofMode, alcatelIND1IPXMIBRipSapFilterGroup=alcatelIND1IPXMIBRipSapFilterGroup, alaIpxFlush=alaIpxFlush, alaIpxExtMsgVlanId=alaIpxExtMsgVlanId, alaIpxWatchdogSpoofVlanId=alaIpxWatchdogSpoofVlanId, alaIpxType20Entry=alaIpxType20Entry)
| 137.058511 | 3,836 | 0.75131 | 3,001 | 25,767 | 6.450183 | 0.075641 | 0.014982 | 0.014568 | 0.015705 | 0.396084 | 0.351759 | 0.332851 | 0.322881 | 0.284858 | 0.275921 | 0 | 0.103516 | 0.09159 | 25,767 | 187 | 3,837 | 137.791444 | 0.723459 | 0.012962 | 0 | 0.073446 | 0 | 0 | 0.149347 | 0.039884 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.039548 | 0 | 0.084746 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
74bb773e96b796a32e9006fbdb28494ee5ebd765 | 1,893 | py | Python | krvjezivot/users/migrations/0003_users_donors.py | jkvakaric/krv-je-zivot | a875dcde468e1e5508713e592f974cb147223b52 | [
"MIT"
] | null | null | null | krvjezivot/users/migrations/0003_users_donors.py | jkvakaric/krv-je-zivot | a875dcde468e1e5508713e592f974cb147223b52 | [
"MIT"
] | null | null | null | krvjezivot/users/migrations/0003_users_donors.py | jkvakaric/krv-je-zivot | a875dcde468e1e5508713e592f974cb147223b52 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.5 on 2018-05-23 22:44
from django.db import migrations, models
import enumfields.fields
import krvjezivot.donations.enums
import krvjezivot.users.enums
class Migration(migrations.Migration):
    """Add donor-profile fields to the ``users.User`` model.

    Adds address, blood group, distance to venue, donation frequency,
    last donation date, rhesus factor and sex (all optional/blank-able).
    """

    dependencies = [
        ('users', '0002_full_name'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='address',
            field=models.CharField(blank=True, max_length=1024, verbose_name='address'),
        ),
        migrations.AddField(
            model_name='user',
            name='blood_group',
            field=enumfields.fields.EnumField(blank=True, enum=krvjezivot.donations.enums.BloodGroup, max_length=32, null=True, verbose_name='blood group'),
        ),
        migrations.AddField(
            model_name='user',
            name='distance',
            field=models.IntegerField(blank=True, null=True, verbose_name='distance to nearest donation venue'),
        ),
        migrations.AddField(
            model_name='user',
            name='frequency',
            field=models.FloatField(blank=True, null=True, verbose_name='average number of donations last year'),
        ),
        migrations.AddField(
            model_name='user',
            name='last_donation_date',
            field=models.DateField(blank=True, null=True, verbose_name='date of last donation'),
        ),
        migrations.AddField(
            model_name='user',
            name='rhesus_factor',
            field=enumfields.fields.EnumField(blank=True, enum=krvjezivot.donations.enums.RhesusFactor, max_length=32, null=True, verbose_name='rhesus factor'),
        ),
        migrations.AddField(
            model_name='user',
            name='sex',
            field=enumfields.fields.EnumField(blank=True, enum=krvjezivot.users.enums.Sex, max_length=32, null=True, verbose_name='sex'),
        ),
    ]
| 36.403846 | 160 | 0.621764 | 204 | 1,893 | 5.651961 | 0.333333 | 0.10928 | 0.139636 | 0.16392 | 0.525585 | 0.525585 | 0.240243 | 0.162186 | 0.116219 | 0.116219 | 0 | 0.020714 | 0.260433 | 1,893 | 51 | 161 | 37.117647 | 0.802857 | 0.023772 | 0 | 0.466667 | 1 | 0 | 0.131094 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.088889 | 0 | 0.155556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
74c637a2577e1df643e2a3b1180f5d79679865a0 | 2,057 | py | Python | pyccl/__init__.py | Russell-Jones-OxPhys/CCL | 1cdc4ecb8ae6fb23806540b39799cc3317473e71 | [
"BSD-3-Clause"
] | null | null | null | pyccl/__init__.py | Russell-Jones-OxPhys/CCL | 1cdc4ecb8ae6fb23806540b39799cc3317473e71 | [
"BSD-3-Clause"
] | null | null | null | pyccl/__init__.py | Russell-Jones-OxPhys/CCL | 1cdc4ecb8ae6fb23806540b39799cc3317473e71 | [
"BSD-3-Clause"
] | null | null | null | """The pyccl package contains all of the submodules that are implemented in
individual files in CCL.
"""
# flake8: noqa
from pkg_resources import get_distribution, DistributionNotFound

# Resolve the installed package version; skipped silently when running from
# an uninstalled source checkout.
try:
    __version__ = get_distribution(__name__).version
except DistributionNotFound:
    # package is not installed
    pass

# Sets the environment variable for default config path if it does not
# exist yet
from os import environ, path

if environ.get("CCL_PARAM_FILE") is None:
    environ["CCL_PARAM_FILE"] = (
        path.dirname(path.abspath(__file__)) + '/ccl_params.ini')

if environ.get("CLASS_PARAM_DIR") is None:
    environ["CLASS_PARAM_DIR"] = path.dirname(path.abspath(__file__))

from . import ccllib as lib
from . import core, constants, background, power, massfunction, halomodel

# Core data structures
from .core import Cosmology

# Background cosmology functions and growth functions
from .background import growth_factor, growth_factor_unnorm, \
    growth_rate, comoving_radial_distance, comoving_angular_distance, \
    h_over_h0, luminosity_distance, distance_modulus, scale_factor_of_chi, \
    omega_x, rho_x

# Power spectrum calculations and sigma8
from .power import linear_matter_power, nonlin_matter_power, sigmaR, \
    sigmaV, sigma8

# Halo mass function
from .massfunction import massfunc, massfunc_m2r, sigmaM, halo_bias

# Cl's and tracers
from .cls import angular_cl, NumberCountsTracer, WeakLensingTracer, \
    CMBLensingTracer
from .lsst_specs import bias_clustering, sigmaz_clustering, \
    sigmaz_sources, dNdz_tomog, PhotoZFunction, PhotoZGaussian

# Useful constants and unit conversions
from .constants import CLIGHT_HMPC, MPC_TO_METER, PC_TO_METER, \
    GNEWT, RHO_CRITICAL, SOLAR_MASS

from .correlation import correlation, correlation_3d

# Properties of haloes
from .halomodel import halomodel_matter_power, halo_concentration

# Specific to massive neutrinos
from .neutrinos import Omeganuh2, nu_masses

# Expose function to toggle debug mode
from .pyutils import debug_mode

from .errors import CCLError
74c877722010d45093123c6bb2302c0e0c62cc73 | 1,516 | py | Python | src/166. Fraction to Recurring Decimal.py | xiaonanln/myleetcode-python | 95d282f21a257f937cd22ef20c3590a69919e307 | [
"Apache-2.0"
] | null | null | null | src/166. Fraction to Recurring Decimal.py | xiaonanln/myleetcode-python | 95d282f21a257f937cd22ef20c3590a69919e307 | [
"Apache-2.0"
] | null | null | null | src/166. Fraction to Recurring Decimal.py | xiaonanln/myleetcode-python | 95d282f21a257f937cd22ef20c3590a69919e307 | [
"Apache-2.0"
] | null | null | null | import time
class Solution(object):
    def fractionToDecimal(self, numerator, denominator):
        """Return numerator/denominator as a decimal string, wrapping any
        repeating fractional part in parentheses (LeetCode 166).

        :type numerator: int
        :type denominator: int
        :rtype: str
        """
        if numerator == 0:
            return '0'
        # Normalize signs: both negative cancels; exactly one negative
        # produces a single leading '-'.
        if numerator < 0 and denominator < 0:
            return self.fractionToDecimal(-numerator, -denominator)
        elif numerator < 0 or denominator < 0:
            return '-' + self.fractionToDecimal(abs(numerator), abs(denominator))
        whole, rem = divmod(numerator, denominator)
        if rem == 0:
            return str(whole)
        return str(whole) + '.' + self.getDecimal(rem, denominator)

    def getDecimal(self, num, denominator):
        """Long-divide num/denominator (0 < num < denominator) into its
        fractional digits, parenthesizing the repeating cycle if any.
        """
        res = ''
        # Maps each remainder seen so far to the index in `res` where its
        # digit was emitted; a repeated remainder marks the cycle start.
        visitedNums = {}
        while num != 0:
            if num in visitedNums:
                repeatStart = visitedNums[num]
                return res[:repeatStart] + '(' + res[repeatStart:] + ')'
            visitedNums[num] = len(res)
            num *= 10
            if num < denominator:
                res += '0'
            else:
                digit, num = divmod(num, denominator)
                res += str(digit)
        return res
# Ad-hoc smoke checks, executed at import time. Converted from Python-2
# print statements (a syntax error on Python 3) to print() calls.
print(Solution().fractionToDecimal(2, 1))
print(Solution().fractionToDecimal(1, 2))
print(Solution().fractionToDecimal(2, 3))
print(Solution().fractionToDecimal(4, 9))
print(Solution().fractionToDecimal(4, 333))
74d381cc69d2e01f962aec382693cd60d7da90cd | 1,254 | py | Python | test/test_align.py | ishine/fac-via-ppg | 552e2c15046ff6f5e2656324e59d83a3bfe50e85 | [
"Apache-2.0"
] | 98 | 2019-06-27T17:00:01.000Z | 2022-03-10T10:48:18.000Z | test/test_align.py | itsclint/fac-via-ppg | 9f749039ee6d7f92ebc5ad261cdfb67035a85e38 | [
"Apache-2.0"
] | 16 | 2019-09-09T03:21:16.000Z | 2021-11-03T01:55:26.000Z | test/test_align.py | itsclint/fac-via-ppg | 9f749039ee6d7f92ebc5ad261cdfb67035a85e38 | [
"Apache-2.0"
] | 36 | 2019-09-06T09:07:42.000Z | 2021-12-20T13:54:47.000Z | # Copyright 2018 Guanlong Zhao
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from common import align
from textgrid import TextGrid
class TestAligner(unittest.TestCase):
    """Round-trip tests for the TextGrid helpers in ``common.align``."""

    def setUp(self):
        # Fixture: a TextGrid file with two tiers, checked into test data.
        self.tg_file = "data/test.TextGrid"

    def tearDown(self):
        pass

    def test_write_tg_to_str(self):
        grid = TextGrid()
        grid.read(self.tg_file, 2)
        serialized = align.write_tg_to_str(grid)
        # A serialized TextGrid always opens with the ooTextFile header.
        self.assertTrue(serialized.startswith('File type = "ooTextFile"'))

    def test_read_tg_from_str(self):
        with open(self.tg_file, 'r') as handle:
            contents = handle.read()
        parsed = align.read_tg_from_str(contents)
        self.assertTrue(isinstance(parsed, TextGrid))
        self.assertEqual(len(parsed.tiers), 2)
| 32.153846 | 74 | 0.704944 | 186 | 1,254 | 4.639785 | 0.505376 | 0.069525 | 0.034762 | 0.03708 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010091 | 0.209729 | 1,254 | 38 | 75 | 33 | 0.860747 | 0.437799 | 0 | 0 | 0 | 0 | 0.06196 | 0 | 0 | 0 | 0 | 0 | 0.157895 | 1 | 0.210526 | false | 0.052632 | 0.157895 | 0 | 0.421053 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
2d0be59b7bb1d28debf0ba11e1837d4e4a08540c | 2,637 | py | Python | slash/utils/__init__.py | kbh2o/slash | 532b7e3acdf46103ece5b86f21c29f9b58587289 | [
"BSD-3-Clause"
] | 70 | 2015-12-05T12:33:10.000Z | 2022-03-03T04:56:58.000Z | slash/utils/__init__.py | kbh2o/slash | 532b7e3acdf46103ece5b86f21c29f9b58587289 | [
"BSD-3-Clause"
] | 711 | 2015-10-06T11:01:48.000Z | 2022-02-09T12:40:47.000Z | slash/utils/__init__.py | kbh2o/slash | 532b7e3acdf46103ece5b86f21c29f9b58587289 | [
"BSD-3-Clause"
] | 37 | 2015-10-13T11:00:51.000Z | 2022-02-08T07:28:11.000Z | import functools
from ..ctx import context
from ..core.markers import repeat_marker
from ..core import requirements
from ..exceptions import SkipTest
def skip_test(*reason_args):
    """Abort the currently executing test.

    Raises a :class:`slash.exceptions.SkipTest` exception; an optional
    reason argument is forwarded to the exception constructor.
    """
    raise SkipTest(*reason_args)
def repeat(num_repetitions):
    """Mark a test so that it is executed ``num_repetitions`` times when run."""
    marker = repeat_marker(num_repetitions)
    return marker
def skipped(thing, reason=None):
    """Decorator marking a test method or class as skipped.

    Usable either directly (``@skipped``) or with a reason string
    (``@skipped('why')``), in which case a partially-applied decorator
    is returned for later use.
    """
    if isinstance(thing, str):
        # Called as @skipped('reason'): defer decoration, remembering why.
        return functools.partial(skipped, reason=thing)
    skip_requirement = requirements.Skip(reason)
    return requirements.requires(skip_requirement)(thing)
def register_skip_exception(exception_type):
    """Teach the session to treat ``exception_type`` as a test skip.

    After registration the exception behaves exactly as if the test had
    called ``skip_test``.

    .. note:: this must be called within an active session
    """
    context.session.register_skip_exception(exception_type)
def add_error(msg=None, frame_correction=0, exc_info=None):
    """Record an error on the current test result.

    :param msg: an object or a string describing the error
    :param frame_correction: number of extra frames to skip when this call
        is delegated from another wrapper, so the reported location points
        at the actual cause of the error
    :param exc_info: (optional) the exc_info tuple of the exception being recorded
    """
    if context.session is None:
        return None
    current_result = context.session.results.current
    # +1 compensates for this helper's own stack frame.
    return current_result.add_error(
        msg, frame_correction=frame_correction + 1, exc_info=exc_info)
def add_failure(msg=None, frame_correction=0, exc_info=None):
    """Record a failure on the current test result.

    :param msg: an object or a string describing the failure
    :param frame_correction: number of extra frames to skip when this call
        is delegated from another wrapper, so the reported location points
        at the actual cause of the failure
    :param exc_info: (optional) the exc_info tuple of the exception being recorded
    """
    if context.session is None:
        return None
    current_result = context.session.results.current
    # +1 compensates for this helper's own stack frame.
    return current_result.add_failure(
        msg, frame_correction=frame_correction + 1, exc_info=exc_info)
def set_test_detail(key, value):
    """Attach an extra piece of information to the currently running test.

    Each test has its own detail storage.

    :param key: a hashable object
    :param value: an object or a string carrying the additional details
    """
    if context.session is None:
        return
    context.session.results.current.set_test_detail(key, value)
| 34.246753 | 119 | 0.732651 | 376 | 2,637 | 5.037234 | 0.348404 | 0.063358 | 0.022175 | 0.020591 | 0.459873 | 0.401795 | 0.388596 | 0.388596 | 0.388596 | 0.32735 | 0 | 0.001889 | 0.197194 | 2,637 | 76 | 120 | 34.697368 | 0.892773 | 0.492984 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.291667 | false | 0 | 0.208333 | 0 | 0.708333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
2d381c92d99c3d97332feb4ffb198a371008ed12 | 2,945 | py | Python | api/migrations/0001_initial.py | backdev96/yamdb_final | ba61132adca74952eded09d0218ded6e234af9ee | [
"Apache-2.0"
] | null | null | null | api/migrations/0001_initial.py | backdev96/yamdb_final | ba61132adca74952eded09d0218ded6e234af9ee | [
"Apache-2.0"
] | 3 | 2021-06-05T00:11:14.000Z | 2021-09-22T19:57:18.000Z | api/migrations/0001_initial.py | backdev96/api_yamdb | 36210130bde3d6c7e5a7cbce6ab978b66c510067 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.5 on 2020-12-07 16:56
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the ``api`` app.

    Creates Category, Genre, Title, Review and Comment.  Order matters:
    Title references Category/Genre, Review references Title and the user
    model, and Comment references Review.
    """

    initial = True

    dependencies = [
        # Review/Comment have FKs to the configured user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Lookup table for title categories (unique slug for URLs).
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('slug', models.SlugField(unique=True)),
            ],
        ),
        # Lookup table for genres; same shape as Category.
        migrations.CreateModel(
            name='Genre',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('slug', models.SlugField(unique=True)),
            ],
        ),
        # A work (title) with an optional category and many genres.
        migrations.CreateModel(
            name='Title',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('year', models.IntegerField()),
                ('rating', models.IntegerField(blank=True, null=True)),
                ('description', models.TextField(max_length=200)),
                # PROTECT: a category in use cannot be deleted.
                ('category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='api.Category')),
                ('genre', models.ManyToManyField(to='api.Genre')),
            ],
        ),
        # A user's review of a title (score defaults to 0).
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField(max_length=255)),
                ('score', models.IntegerField(default=0)),
                ('pub_date', models.DateTimeField(auto_now_add=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('title', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='api.Title')),
            ],
        ),
        # A user's comment attached to a review.
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField(max_length=255)),
                ('pub_date', models.DateTimeField(auto_now_add=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('review', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='api.Review')),
            ],
        ),
    ]
| 43.955224 | 133 | 0.582003 | 298 | 2,945 | 5.614094 | 0.278523 | 0.033473 | 0.050209 | 0.0789 | 0.633592 | 0.633592 | 0.61566 | 0.61566 | 0.61566 | 0.61566 | 0 | 0.01591 | 0.274363 | 2,945 | 66 | 134 | 44.621212 | 0.766963 | 0.01528 | 0 | 0.610169 | 1 | 0 | 0.073154 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.050847 | 0 | 0.118644 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
2d464bb2b382adc56b7664d71eeadee804346536 | 1,056 | py | Python | stock_news_beta/stock_news_beta/apps/focus/migrations/0004_auto_20190125_1724.py | zws910/stock-news | dcf90d25a36a903e44ddfd04bb655f665b692b36 | [
"MIT"
] | null | null | null | stock_news_beta/stock_news_beta/apps/focus/migrations/0004_auto_20190125_1724.py | zws910/stock-news | dcf90d25a36a903e44ddfd04bb655f665b692b36 | [
"MIT"
] | null | null | null | stock_news_beta/stock_news_beta/apps/focus/migrations/0004_auto_20190125_1724.py | zws910/stock-news | dcf90d25a36a903e44ddfd04bb655f665b692b36 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2019-01-25 09:24
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Set human-readable (Chinese) verbose names on the focus app models.

    Pure Meta-option changes (admin display names and, for Column, default
    ordering); no database schema is altered.
    """

    dependencies = [
        ('focus', '0003_auto_20190125_1721'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='article',
            options={'verbose_name': '文章', 'verbose_name_plural': '文章'},
        ),
        migrations.AlterModelOptions(
            name='author',
            options={'verbose_name': '作者', 'verbose_name_plural': '作者'},
        ),
        # Column additionally gets a default ordering by name.
        migrations.AlterModelOptions(
            name='column',
            options={'ordering': ['name'], 'verbose_name': '类别', 'verbose_name_plural': '类别'},
        ),
        migrations.AlterModelOptions(
            name='comment',
            options={'verbose_name': '评论', 'verbose_name_plural': '评论'},
        ),
        migrations.AlterModelOptions(
            name='poll',
            options={'verbose_name': '点赞', 'verbose_name_plural': '点赞'},
        ),
    ]
| 29.333333 | 94 | 0.568182 | 98 | 1,056 | 5.887755 | 0.469388 | 0.190641 | 0.268631 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044914 | 0.283144 | 1,056 | 35 | 95 | 30.171429 | 0.717305 | 0.065341 | 0 | 0.357143 | 1 | 0 | 0.248984 | 0.023374 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.178571 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
7437242aebcdc39142761605d71fd8ac6df32f75 | 348 | py | Python | venv/lib/python3.8/site-packages/azureml/_tracing/__init__.py | amcclead7336/Enterprise_Data_Science_Final | ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28 | [
"Unlicense",
"MIT"
] | null | null | null | venv/lib/python3.8/site-packages/azureml/_tracing/__init__.py | amcclead7336/Enterprise_Data_Science_Final | ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28 | [
"Unlicense",
"MIT"
] | null | null | null | venv/lib/python3.8/site-packages/azureml/_tracing/__init__.py | amcclead7336/Enterprise_Data_Science_Final | ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28 | [
"Unlicense",
"MIT"
] | 2 | 2021-05-23T16:46:31.000Z | 2021-05-26T23:51:09.000Z | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from azureml._base_sdk_common import __version__ as VERSION
from ._tracer_factory import get_tracer
# Re-export the shared base-SDK version string under this package.
__version__ = VERSION

# Public API of this package: only the tracer factory.
__all__ = [
    'get_tracer'
]
| 26.769231 | 60 | 0.488506 | 27 | 348 | 5.592593 | 0.666667 | 0.119205 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.140805 | 348 | 12 | 61 | 29 | 0.505017 | 0.497126 | 0 | 0 | 0 | 0 | 0.062893 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
744939994e227d0b6b7c94dcb8010fec20ecb0bf | 267 | py | Python | elm-finder/apps/homepage/admin.py | martin-jahn/elm-finder | 7510e38d52eebaf462ae5c5ce961e4884b1709bd | [
"MIT"
] | 2 | 2019-04-28T21:32:46.000Z | 2019-05-13T05:27:09.000Z | elm-finder/apps/homepage/admin.py | martin-jahn/elm-finder | 7510e38d52eebaf462ae5c5ce961e4884b1709bd | [
"MIT"
] | null | null | null | elm-finder/apps/homepage/admin.py | martin-jahn/elm-finder | 7510e38d52eebaf462ae5c5ce961e4884b1709bd | [
"MIT"
] | null | null | null | from django.contrib import admin
from apps.homepage.models import PSA, Dpotw, Gotw
@admin.register(Dpotw)
class DpotwAdmin(admin.ModelAdmin):
    # Raw-ID widget for the package FK: avoids rendering every package
    # in a select box on the admin change form.
    raw_id_fields = ("package",)
@admin.register(Gotw)
class GotwAdmin(admin.ModelAdmin):
    # Raw-ID widget for the grid FK, matching the DpotwAdmin convention.
    raw_id_fields = ("grid",)
| 19.071429 | 49 | 0.745318 | 35 | 267 | 5.571429 | 0.6 | 0.133333 | 0.184615 | 0.205128 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.131086 | 267 | 13 | 50 | 20.538462 | 0.840517 | 0 | 0 | 0 | 0 | 0 | 0.041199 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
7454293e13312ee705f2fc3945097505c0c97567 | 946 | py | Python | project/evaluate/views.py | ktzoulas/stateless-password-manager | c2b8a5cad878758a924a5d8550e5b80b50d7a60b | [
"MIT"
] | null | null | null | project/evaluate/views.py | ktzoulas/stateless-password-manager | c2b8a5cad878758a924a5d8550e5b80b50d7a60b | [
"MIT"
] | null | null | null | project/evaluate/views.py | ktzoulas/stateless-password-manager | c2b8a5cad878758a924a5d8550e5b80b50d7a60b | [
"MIT"
] | 1 | 2019-10-03T14:24:07.000Z | 2019-10-03T14:24:07.000Z | """
Contains the views of the 'evaluate' blueprint.
"""
# pylint: disable=invalid-name
from flask import Blueprint, render_template, request
from project.evaluate.forms import EvaluateForm
from project.evaluate.helpers import evaluate_pass
evaluate_blueprint = Blueprint('evaluate', __name__, url_prefix='/evaluate')


@evaluate_blueprint.route('/', methods=['GET', 'POST'])
def index():
    """Render the password-evaluation page.

    On a valid POST with a non-empty password, computes its strength via
    ``evaluate_pass`` and re-renders the page with the result; otherwise
    (GET, invalid form, empty password) ``power`` stays ``None``.
    """
    power = None
    form = EvaluateForm(request.form)
    if form.validate_on_submit():
        # Truthiness covers both None and '' (the original explicit check).
        if form.password.data:
            power = evaluate_pass(form.password.data)
    # Single render call for all paths — removes the duplicated
    # render_template invocation; power=None reaches the template on the
    # non-submit path, just as it did on a validated-but-empty submit.
    return render_template('evaluate/index.html', form=form, power=power,
                           breadcrumb=(('Home', 'main.index'), 'Evaluate'))
| 35.037037 | 79 | 0.664905 | 107 | 946 | 5.747664 | 0.46729 | 0.082927 | 0.078049 | 0.091057 | 0.247154 | 0.146341 | 0.146341 | 0.146341 | 0 | 0 | 0 | 0 | 0.200846 | 946 | 26 | 80 | 36.384615 | 0.813492 | 0.112051 | 0 | 0.133333 | 0 | 0 | 0.13017 | 0 | 0 | 0 | 0 | 0.038462 | 0 | 1 | 0.066667 | false | 0.2 | 0.2 | 0 | 0.4 | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.