hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9345a6527e2e2f281b99f01f1091aa1934b40359 | 605 | py | Python | pub_site/src/pub_site/config/beta.py | webee/pay | b48c6892686bf3f9014bb67ed119506e41050d45 | [
"W3C"
] | 1 | 2019-10-14T11:51:49.000Z | 2019-10-14T11:51:49.000Z | pub_site/src/pub_site/config/beta.py | webee/pay | b48c6892686bf3f9014bb67ed119506e41050d45 | [
"W3C"
] | null | null | null | pub_site/src/pub_site/config/beta.py | webee/pay | b48c6892686bf3f9014bb67ed119506e41050d45 | [
"W3C"
] | null | null | null | # coding=utf-8
class App:
TESTING = True
# ReverseProxy.HTTP_X_SCRIPT_NAME='/__'
HOST_URL = 'http://pay.lvye.com'
class Checkout:
ZYT_MAIN_PAGE = 'http://pay.lvye.com/__/main'
VALID_NETLOCS = ['pay.lvye.com']
AES_KEY = "2HF5UKPIADDYBHDSKOVP9GMA80MU2IV2"
PAYMENT_CHECKOUT_VALID_SECONDS = 1 * 60 * 60
class LvyePaySitePayClientConfig:
ROOT_URL = "http://pay.lvye.com/api/__"
CHECKOUT_URL = 'http://pay.lvye.com/__/checkout/{sn}'
class LvyeCorpPaySitePayClientConfig:
ROOT_URL = "http://pay.lvye.com/api/__"
CHECKOUT_URL = 'http://pay.lvye.com/__/checkout/{sn}'
| 22.407407 | 57 | 0.697521 | 77 | 605 | 5.12987 | 0.454545 | 0.124051 | 0.177215 | 0.212658 | 0.341772 | 0.298734 | 0.298734 | 0.298734 | 0.298734 | 0.298734 | 0 | 0.025341 | 0.152066 | 605 | 26 | 58 | 23.269231 | 0.744639 | 0.082645 | 0 | 0.285714 | 0 | 0 | 0.387681 | 0.057971 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0.928571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
934eccc767ff4916556ee1ef0e61833828628888 | 751 | py | Python | players/utils.py | taqpol/discord-bot | b76f73e3926fc8f6c26240b74df1ab8f030214f5 | [
"Unlicense"
] | null | null | null | players/utils.py | taqpol/discord-bot | b76f73e3926fc8f6c26240b74df1ab8f030214f5 | [
"Unlicense"
] | null | null | null | players/utils.py | taqpol/discord-bot | b76f73e3926fc8f6c26240b74df1ab8f030214f5 | [
"Unlicense"
] | null | null | null | import requests
import os
API_KEY = os.environ.get('API_KEY')
headers = {'Authorization': API_KEY, 'Accept':'application/vnd.api+json'}
def get_player_elo(player_info):
return player_info.json()['data'][0]['attributes']['stats']['rankPoints']
def retrieve_player_info(player_name):
if ''.join(list(filter(lambda x: x.isalpha() or x == '_', list(player_name)))) != player_name:
response = requests.Response()
response.status_code = 400
response.reason = 'Bad Request'
return response
else:
return requests.get('https://api.dc01.gamelockerapp.com/shards/na/players/',
headers=headers, params={'filter[playerNames]':'{}'.format(player_name)})
def retrieve_match_telemetry(match_id):
pass
def retrieve_match_data(match_id):
pass | 26.821429 | 95 | 0.733688 | 103 | 751 | 5.145631 | 0.533981 | 0.075472 | 0.060377 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008915 | 0.103862 | 751 | 28 | 96 | 26.821429 | 0.778603 | 0 | 0 | 0.105263 | 0 | 0 | 0.219415 | 0.031915 | 0 | 0 | 0 | 0 | 0 | 1 | 0.210526 | false | 0.105263 | 0.105263 | 0.052632 | 0.473684 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
935684d2c79442ba8513912e104b4d9ce4d4f7c0 | 634 | py | Python | psana/psana/psexp/serial_ds.py | AntoineDujardin/lcls2 | 8b9d2815497fbbabb4d37800fd86a7be1728b552 | [
"BSD-3-Clause-LBNL"
] | null | null | null | psana/psana/psexp/serial_ds.py | AntoineDujardin/lcls2 | 8b9d2815497fbbabb4d37800fd86a7be1728b552 | [
"BSD-3-Clause-LBNL"
] | null | null | null | psana/psana/psexp/serial_ds.py | AntoineDujardin/lcls2 | 8b9d2815497fbbabb4d37800fd86a7be1728b552 | [
"BSD-3-Clause-LBNL"
] | null | null | null | from .ds_base import DataSourceBase
from psana.psexp.run import RunSerial
class SerialDataSource(DataSourceBase):
def __init__(self, *args, **kwargs):
super(SerialDataSource, self).__init__(**kwargs)
self.exp, self.run_dict = self.setup_xtcs()
class Factory:
def create(self, *args, **kwargs): return SerialDataSource(*args, **kwargs)
def runs(self):
for run_no in self.run_dict:
yield RunSerial(self.exp, run_no, self.run_dict[run_no], \
max_events=self.max_events, batch_size=self.batch_size, \
filter_callback=self.filter)
| 35.222222 | 83 | 0.652997 | 78 | 634 | 5.038462 | 0.448718 | 0.076336 | 0.083969 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.239748 | 634 | 17 | 84 | 37.294118 | 0.815353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.153846 | 0.076923 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
935bde7b66a3bdd728a54064990d14ed5e07e862 | 140 | py | Python | python/11399.py | zheedong/BaekJoon | 7f9e00085276a337d18ee3bb90c98126f7af4d3a | [
"MIT"
] | null | null | null | python/11399.py | zheedong/BaekJoon | 7f9e00085276a337d18ee3bb90c98126f7af4d3a | [
"MIT"
] | null | null | null | python/11399.py | zheedong/BaekJoon | 7f9e00085276a337d18ee3bb90c98126f7af4d3a | [
"MIT"
] | null | null | null | n = int(input())
p_list = sorted(list(map(int, input().split())))
ret = 0
for i in range(n):
ret += (n-i) * p_list[i]
print(ret) | 14 | 48 | 0.55 | 26 | 140 | 2.884615 | 0.576923 | 0.213333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009174 | 0.221429 | 140 | 10 | 49 | 14 | 0.678899 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.166667 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
937263c43ea5d8f8ecdcbf404316569afc6ddd56 | 14,943 | py | Python | pysnmp-with-texts/ENTERASYS-ACTIVATION-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/ENTERASYS-ACTIVATION-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/ENTERASYS-ACTIVATION-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module ENTERASYS-ACTIVATION-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ENTERASYS-ACTIVATION-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:03:07 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
etsysModules, = mibBuilder.importSymbols("ENTERASYS-MIB-NAMES", "etsysModules")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup")
NotificationType, Unsigned32, Gauge32, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, TimeTicks, Integer32, Counter64, ModuleIdentity, Counter32, iso, MibIdentifier, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "Unsigned32", "Gauge32", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "TimeTicks", "Integer32", "Counter64", "ModuleIdentity", "Counter32", "iso", "MibIdentifier", "IpAddress")
RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "DisplayString")
etsysActivationMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999))
etsysActivationMIB.setRevisions(('2002-04-18 14:54',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: etsysActivationMIB.setRevisionsDescriptions(('The initial version of this MIB module.',))
if mibBuilder.loadTexts: etsysActivationMIB.setLastUpdated('200204181454Z')
if mibBuilder.loadTexts: etsysActivationMIB.setOrganization('Enterasys Networks, Inc')
if mibBuilder.loadTexts: etsysActivationMIB.setContactInfo('Postal: Enterasys Networks, Inc. 500 Spaulding Turnpike P.O. Box 3060 Portsmouth, NH 03801 Phone: +1 603 501 5500 E-mail: support@enterasys.com WWW: http://www.enterasys.com')
if mibBuilder.loadTexts: etsysActivationMIB.setDescription("This MIB module defines a portion of the SNMP enterprise MIBs under Enterasys Networks' enterprise OID pertaining to configuration of product activation keys.")
etsysActivationObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1))
class EnterasysKeyType(TextualConvention, Integer32):
description = 'A value of this type indicates whether an activation key is a product key, or whether it is a special kind of key such as a demonstration key. noKey(1) Indicates that no key is configured. unknownKeyType(2) Indicates that a key is configured, but that the agent has no idea what type of key it is. productKey(3) Indicates that a product key is configured. demoKey(4) Indicates that a demonstration key is configured. Demonstration keys intended for customer use will typically have expirations or other restrictions.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
namedValues = NamedValues(("noKey", 1), ("unknownKeyType", 2), ("productKey", 3), ("demoKey", 4))
class EnterasysFeature(TextualConvention, Integer32):
description = 'A value of this type identifies an optional feature for which an activation key may be bought or obtained. This enumeration type will be extended as necessary.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("dot1xAuthentication", 1), ("pointToMultipoint", 2))
etsysActivationBaseBranch = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1))
etsysMaxActivationKeyRow = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysMaxActivationKeyRow.setStatus('current')
if mibBuilder.loadTexts: etsysMaxActivationKeyRow.setDescription('The largest value that the agent supports for the index object etsysActivationKeyRow.')
etsysActivationKeyTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 2), )
if mibBuilder.loadTexts: etsysActivationKeyTable.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyTable.setDescription('This table contains activation keys for optional features.')
etsysActivationKeyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 2, 1), ).setIndexNames((0, "ENTERASYS-ACTIVATION-MIB", "etsysActivationKeyRow"))
if mibBuilder.loadTexts: etsysActivationKeyEntry.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyEntry.setDescription('Each valid conceptual row contains basic information about one product activation key. Only those rows for which the etsysActivationKeyStatus is active(1) may enable features. Note that it is possible for an active(1) row to contain a well-formatted, internally-consistent key that has expired. A managed system is under no obligation to enable features in response to the presence of expired keys.')
etsysActivationKeyRow = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: etsysActivationKeyRow.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyRow.setDescription('An index that uniquely identifies a row in the product key table. Agents are not required to accept arbitrary indices -- they may limit indices to numbers in the range (1 - N), where N is defined as the maximum number of activation keys that can usefully be supported on a product.')
etsysActivationLicenseString = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 2, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysActivationLicenseString.setStatus('current')
if mibBuilder.loadTexts: etsysActivationLicenseString.setDescription("A place for human-readable administrative information associated with this activation key, such as a product serial number or a demo key's registration date. Some key formats may require entry of 'License String' values provided by the vendor. Agents may enforce the following rule with respect to such paired-key rows: ------------------------------------------------------- This object MUST be set before etsysActivationKeyStatus can become active(1), and MAY NOT be modified while etsysActivationKeyStatus is active(1). -------------------------------------------------------")
etsysActivationKeyValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 2, 1, 3), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysActivationKeyValue.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyValue.setDescription("An activation key. The activation key must be coded as a string of printable characters. Spaces and hyphens are reserved punctuation characters. They may be used freely on input and output, and do not form part of the key value. (The agent is not required to preserve these or other non-essential aspects of the key formatting.) The key must conform to one of the meta-formats defined in this DESCRIPTION. These meta-formats are subject to change. Agents should validate activation keys at Set time. An agent may reject even a valid key if it is inapplicable to the managed device. This object MUST be set before etsysActivationKeyStatus can become active(1), and MAY NOT be modified while etsysActivationKeyStatus is active(1). ======================================================= Standard activation keys have the following format: <FormatCode> <OpaqueKey> The <FormatCode> is encoded as four hexadecimal digits, and identifies the format of the <OpaqueKey>. The <OpaqueKey> may be encoded in any fashion the agent likes, within the constraints mentioned earlier in this DESCRIPTION. ======================================================= A platform may accept keys of the format <Keyword> [Qualifiers] provided that there is no possibility of confusion with standard activation keys. This format is best suited to non-secret demo keys that are intended for a wide audience ('everyone reading the user manual'). ======================================================= Backwards compatibility example Task : Configure an existing RoamAbout AccessPoint 2000 P-MP activation key, using this MIB. <OpaqueKey> : XXXX-XXXX-XXXX-XXXX (existing key) <FormatCode> : 0001 You enter : 0001-XXXX-XXXX-XXXX-XXXX =======================================================")
etsysActivationKeyType = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 2, 1, 4), EnterasysKeyType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysActivationKeyType.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyType.setDescription('Derived from the activation key. Identifies the type of key (product key vs. demonstration key). Identification of existing demonstration keys may not be 100% accurate.')
etsysActivationKeyStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysActivationKeyStatus.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyStatus.setDescription('Supports creation, deletion, and activation of rows in the etsysActivationKeyTable. An instance of this variable may become active(1) only when there is a corresponding etsysActivationKeyValue. For some key formats, the etsysActivationLicenseString may need to be set to a matching vendor-supplied value. Note that a row with an active(1) status may contain a demo key that has expired, and that no longer provides access to any features.')
etsysActivationKeyFeatureTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 3), )
if mibBuilder.loadTexts: etsysActivationKeyFeatureTable.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyFeatureTable.setDescription("This table indicates which optional feature or features each activation key enables. Rows only appear in this table for 'etsysActivationKeyValue' instances that contain recognizable key values.")
etsysActivationKeyFeatureEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 3, 1), ).setIndexNames((0, "ENTERASYS-ACTIVATION-MIB", "etsysActivationKeyRow"), (0, "ENTERASYS-ACTIVATION-MIB", "etsysActivationKeyFeature"))
if mibBuilder.loadTexts: etsysActivationKeyFeatureEntry.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyFeatureEntry.setDescription('Each valid conceptual row indicates the existence of a known mapping between an activation key and an optional feature.')
etsysActivationKeyFeature = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 3, 1, 1), EnterasysFeature())
if mibBuilder.loadTexts: etsysActivationKeyFeature.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyFeature.setDescription('Identifies one of the optional product features enabled by an activation key in the etsysActivationKeyTable.')
etsysActivationKeyRestrictions = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 1, 1, 3, 1, 2), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysActivationKeyRestrictions.setStatus('current')
if mibBuilder.loadTexts: etsysActivationKeyRestrictions.setDescription("If the activation key associated with this row is a demo key, this MIB object may contain a human-readable string describing the key's restrictions, expiration conditions, and/or status. A demo key that enables several features could, at least theoretically, have different conditions for each. Platforms may automatically enforce expirations, but are not guaranteed to do so. It is ultimately the system manager's responsibility to clean up expired keys. For a key that has no restrictions, this object's value may consist of the empty string, or of whitespace.")
etsysActivationConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 2))
etsysActivationGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 2, 1))
etsysActivationCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 2, 2))
etsysActivationBaseGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 2, 1, 1)).setObjects(("ENTERASYS-ACTIVATION-MIB", "etsysMaxActivationKeyRow"), ("ENTERASYS-ACTIVATION-MIB", "etsysActivationLicenseString"), ("ENTERASYS-ACTIVATION-MIB", "etsysActivationKeyValue"), ("ENTERASYS-ACTIVATION-MIB", "etsysActivationKeyType"), ("ENTERASYS-ACTIVATION-MIB", "etsysActivationKeyStatus"), ("ENTERASYS-ACTIVATION-MIB", "etsysActivationKeyRestrictions"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysActivationBaseGroup = etsysActivationBaseGroup.setStatus('current')
if mibBuilder.loadTexts: etsysActivationBaseGroup.setDescription('A collection of objects for configuring activation keys for optional platform features.')
etsysActivationCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 99999, 2, 2, 1)).setObjects(("ENTERASYS-ACTIVATION-MIB", "etsysActivationBaseGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysActivationCompliance = etsysActivationCompliance.setStatus('current')
if mibBuilder.loadTexts: etsysActivationCompliance.setDescription('The compliance statement for devices that support the Enterasys Product Activation MIB.')
mibBuilder.exportSymbols("ENTERASYS-ACTIVATION-MIB", etsysActivationMIB=etsysActivationMIB, etsysActivationConformance=etsysActivationConformance, etsysActivationKeyRestrictions=etsysActivationKeyRestrictions, etsysActivationKeyValue=etsysActivationKeyValue, etsysActivationCompliance=etsysActivationCompliance, EnterasysFeature=EnterasysFeature, etsysActivationCompliances=etsysActivationCompliances, etsysActivationKeyRow=etsysActivationKeyRow, etsysActivationKeyFeatureTable=etsysActivationKeyFeatureTable, etsysActivationKeyEntry=etsysActivationKeyEntry, etsysActivationGroups=etsysActivationGroups, etsysActivationKeyTable=etsysActivationKeyTable, etsysActivationKeyFeatureEntry=etsysActivationKeyFeatureEntry, etsysActivationObjects=etsysActivationObjects, etsysActivationBaseGroup=etsysActivationBaseGroup, etsysActivationKeyStatus=etsysActivationKeyStatus, etsysActivationKeyFeature=etsysActivationKeyFeature, EnterasysKeyType=EnterasysKeyType, PYSNMP_MODULE_ID=etsysActivationMIB, etsysActivationBaseBranch=etsysActivationBaseBranch, etsysActivationKeyType=etsysActivationKeyType, etsysMaxActivationKeyRow=etsysMaxActivationKeyRow, etsysActivationLicenseString=etsysActivationLicenseString)
| 169.806818 | 1,827 | 0.781102 | 1,758 | 14,943 | 6.638225 | 0.25256 | 0.005313 | 0.055784 | 0.006855 | 0.249614 | 0.162211 | 0.152271 | 0.142502 | 0.123308 | 0.109083 | 0 | 0.046124 | 0.104798 | 14,943 | 87 | 1,828 | 171.758621 | 0.826269 | 0.022887 | 0 | 0.065789 | 0 | 0.157895 | 0.500822 | 0.083128 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.105263 | 0 | 0.236842 | 0.013158 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
9378d26413d10b7b3cf680e8ac7f047220e227f2 | 1,120 | py | Python | mindboggle/x/test_zernike/multiproc/Qklnu_m.py | cemlyn007/mindboggle | 947d4b3f41fb7a24c079550c7255c4d16939d740 | [
"CC-BY-3.0",
"Apache-2.0"
] | 118 | 2015-10-23T19:03:30.000Z | 2022-03-17T04:46:27.000Z | mindboggle/x/test_zernike/multiproc/Qklnu_m.py | cemlyn007/mindboggle | 947d4b3f41fb7a24c079550c7255c4d16939d740 | [
"CC-BY-3.0",
"Apache-2.0"
] | 136 | 2015-10-23T06:03:05.000Z | 2021-11-30T05:31:09.000Z | mindboggle/x/test_zernike/multiproc/Qklnu_m.py | cemlyn007/mindboggle | 947d4b3f41fb7a24c079550c7255c4d16939d740 | [
"CC-BY-3.0",
"Apache-2.0"
] | 56 | 2015-10-23T13:04:30.000Z | 2022-03-16T02:44:06.000Z | def Qklnu(cfg,k,l,nu) : #function q=Qklnu(k,l,nu)
#
#% Computes Q, neccesary constant#% Computes Q, neccesary constant
#% for the moments computation#% for the moments computation
#
aux_1=cfg.power(-1,k+nu)/cfg.power(4.0,k) #aux_1=power(-1,k+nu)/power(4,k)
aux_2=cfg.sqrt((2*l+4*k+3)/3.0) #aux_2=sqrt((2*l+4*k+3)/3)
aux_3=cfg.trinomial(nu,k-nu,l+nu+1)*cfg.nchoosek(2*(l+nu+1+k),l+nu+1+k) #aux_3=trinomial(nu,k-nu,l+nu+1)*nchoosek(2*(l+nu+1+k),l+nu+1+k)
aux_4=cfg.nchoosek(2.0*(l+nu+1),l+nu+1) #aux_4=nchoosek(2*(l+nu+1),l+nu+1)
q=(aux_1*aux_2*aux_3)/aux_4 #q=(aux_1*aux_2*aux_3)/aux_4
return q
| 93.333333 | 142 | 0.359821 | 151 | 1,120 | 2.562914 | 0.172185 | 0.093023 | 0.103359 | 0.05168 | 0.410853 | 0.387597 | 0.346253 | 0.20155 | 0.20155 | 0.113695 | 0 | 0.08079 | 0.502679 | 1,120 | 11 | 143 | 101.818182 | 0.614004 | 0.290179 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
fa7d5af88c9105ec28928cc59d4b5789f56ee777 | 223 | py | Python | util/singleton.py | PangPangPangPangPang/webapp | fa8d8b55071acaa65ee07f0be201173e2aaf6584 | [
"MIT"
] | null | null | null | util/singleton.py | PangPangPangPangPang/webapp | fa8d8b55071acaa65ee07f0be201173e2aaf6584 | [
"MIT"
] | null | null | null | util/singleton.py | PangPangPangPangPang/webapp | fa8d8b55071acaa65ee07f0be201173e2aaf6584 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017-05-23 16:23
# @Author : Max
# @File : singleton.py
def singleton(cls):
instance = cls()
instance.__call__ = lambda: instance
return instance
| 18.583333 | 40 | 0.605381 | 29 | 223 | 4.517241 | 0.793103 | 0.167939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.076023 | 0.233184 | 223 | 11 | 41 | 20.272727 | 0.690058 | 0.488789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
fa84aaf8038713f27827c3e3ee293bcf89e0bf0d | 479 | py | Python | site_pages/views.py | Boring-Mind/FreshInterns | cd9c0e24745d756ca9241cb8840cd54ea4960f34 | [
"MIT"
] | 1 | 2021-07-15T08:23:24.000Z | 2021-07-15T08:23:24.000Z | site_pages/views.py | Boring-Mind/FreshInterns | cd9c0e24745d756ca9241cb8840cd54ea4960f34 | [
"MIT"
] | null | null | null | site_pages/views.py | Boring-Mind/FreshInterns | cd9c0e24745d756ca9241cb8840cd54ea4960f34 | [
"MIT"
] | null | null | null | from django.urls import reverse
from django.views.generic.edit import CreateView
from .email import make_context, send_email
from .forms import UserProfileForm
class InternFormView(CreateView):
template_name = 'resume.html'
form_class = UserProfileForm
def form_valid(self, form):
context = make_context(form.cleaned_data)
send_email(context)
return super().form_valid(form)
def get_success_url(self):
return reverse('index')
| 25.210526 | 49 | 0.732777 | 60 | 479 | 5.666667 | 0.55 | 0.058824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.189979 | 479 | 18 | 50 | 26.611111 | 0.876289 | 0 | 0 | 0 | 0 | 0 | 0.033403 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153846 | false | 0 | 0.307692 | 0.076923 | 0.846154 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
faa18063953df55db8ea1a45922f4be3ad6a26ad | 241 | py | Python | raspberry/mpu6050/test.py | schindler/vibration_analyzer | 2f7af017113d0a33d75675a56642294e7080a58e | [
"MIT"
] | null | null | null | raspberry/mpu6050/test.py | schindler/vibration_analyzer | 2f7af017113d0a33d75675a56642294e7080a58e | [
"MIT"
] | null | null | null | raspberry/mpu6050/test.py | schindler/vibration_analyzer | 2f7af017113d0a33d75675a56642294e7080a58e | [
"MIT"
] | null | null | null | """
Test Module
"""
import datetime
import time
import mpu6050
if __name__ == "__main__":
MPU = mpu6050.Sensor()
for i in xrange(0, 60):
print str(datetime.datetime.now())+":"+str(MPU.temperature())
time.sleep(60)
| 17.214286 | 69 | 0.626556 | 31 | 241 | 4.612903 | 0.709677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.068783 | 0.215768 | 241 | 13 | 70 | 18.538462 | 0.687831 | 0 | 0 | 0 | 0 | 0 | 0.040724 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.375 | null | null | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
fabd6cd355ebd2d108efc9771febb694c264b5ea | 2,657 | py | Python | digest/tests/test_import_importpython.py | PURNA-ROCK/pythondigest | ba21758a25a47de19800b208c420f16d6688a16b | [
"MIT"
] | 124 | 2015-08-17T19:41:16.000Z | 2022-01-12T00:25:52.000Z | digest/tests/test_import_importpython.py | PURNA-ROCK/pythondigest | ba21758a25a47de19800b208c420f16d6688a16b | [
"MIT"
] | 62 | 2015-08-17T02:13:20.000Z | 2020-04-17T19:07:40.000Z | digest/tests/test_import_importpython.py | PURNA-ROCK/pythondigest | ba21758a25a47de19800b208c420f16d6688a16b | [
"MIT"
] | 73 | 2015-08-18T13:50:47.000Z | 2021-09-27T14:09:47.000Z | from django.test import TestCase
from mock import patch
from digest.management.commands.import_importpython import ImportPythonParser
from digest.utils import MockResponse, read_fixture
class ImportPythonWeeklyTest(TestCase):
def setUp(self):
self.url = "http://importpython.com/newsletter/no/60/"
test_fixture = 'fixture_test_import_importpython_test_get_blocks.txt'
self.patcher = patch(
'digest.management.commands.import_importpython.urlopen')
self.urlopen_mock = self.patcher.start()
self.urlopen_mock.return_value = MockResponse(
read_fixture(test_fixture))
self.parser = ImportPythonParser()
def tearDown(self):
self.patcher.stop()
def test_correctly_creates_issue_urls(self):
self.assertEqual(ImportPythonParser.get_issue_url(2),
"http://importpython.com/static/files/issue2.html")
self.assertEqual(ImportPythonParser.get_issue_url(12),
"http://importpython.com/newsletter/draft/12")
self.assertEqual(ImportPythonParser.get_issue_url(56),
"http://importpython.com/newsletter/no/56")
with self.assertRaises(ValueError):
ImportPythonParser.get_issue_url(-100)
def test_correct_number_of_blocks_parsed(self):
blocks = self.parser.get_blocks(self.url)
self.assertEqual(len(blocks), 25)
def test_correctly_parses_block(self):
blocks = self.parser.get_blocks(self.url)
block = blocks[0]
self.assertEqual(block['link'],
"https://talkpython.fm/episodes/show/44/project-jupyter-and-ipython")
self.assertEqual(block['title'],
"Project Jupyter and IPython Podcast Interview")
self.assertEqual(block['content'],
"One of the fastest growing areas in Python is scientific computing. In scientific computing with Python, there are a few key packages that make it special. These include NumPy / SciPy / and related packages. The one that brings it all together, visually, is IPython (now known as Project Jupyter). That's the topic on episode 44 of Talk Python To Me. ")
def test_correctly_gets_latest_url(self):
test_latest = 'fixture_test_import_importpython_test_get_latest_url.txt'
self._old_return_value = self.urlopen_mock.return_value
self.urlopen_mock.return_value = MockResponse(read_fixture(test_latest))
latest_url = self.parser.get_latest_issue_url()
self.assertEqual(latest_url,
"http://importpython.com/newsletter/no/72/")
| 49.203704 | 379 | 0.689123 | 317 | 2,657 | 5.577287 | 0.384858 | 0.067873 | 0.053733 | 0.065611 | 0.340498 | 0.275452 | 0.121606 | 0.100679 | 0.059955 | 0 | 0 | 0.011589 | 0.220549 | 2,657 | 53 | 380 | 50.132075 | 0.842105 | 0 | 0 | 0.045455 | 0 | 0.022727 | 0.321039 | 0.060971 | 0 | 0 | 0 | 0 | 0.204545 | 1 | 0.136364 | false | 0 | 0.409091 | 0 | 0.568182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
fabd9d1b3b80479f89e381e2ad06232af6f0bce0 | 3,068 | py | Python | stock.py | mikenjinoda/applewatch | 3bd78f0ac6c7ab774cab5eda8f739fc71ccf9b89 | [
"MIT"
] | null | null | null | stock.py | mikenjinoda/applewatch | 3bd78f0ac6c7ab774cab5eda8f739fc71ccf9b89 | [
"MIT"
] | null | null | null | stock.py | mikenjinoda/applewatch | 3bd78f0ac6c7ab774cab5eda8f739fc71ccf9b89 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import time
import sys
import smtplib
import requests
URL = "http://www.apple.com/shop/retail/pickup-message"
BUY = "http://store.apple.com/xc/product/"
def check_stock(model, zip_, dest, sec=5, login=None, pwd=None):
good_stores = []
my_alert = Alert(dest, login, pwd)
initmsg = "{0} start tracking {1} in {2}. Alert will sent to {3}".format(
time.strftime("%m/%d/%Y %H:%M:%S"), model, zip_, dest)
print initmsg
my_alert.send(initmsg)
params = {'parts.0': model,
'location': zip_}
while True:
print "=================================="
print "[{current}] Avaiable: {stores}".format(
current=time.strftime("%m/%d/%Y %H:%M:%S"),
stores=', '.join([store.encode('utf-8') for store in good_stores])
if good_stores else "None")
try:
stores = requests.get(URL, params=params).json()['body']['stores'][:8]
except (ValueError, KeyError):
print "Bad response from server..."
continue
for store in stores:
sname = store['storeName']
item = store['partsAvailability'][model]['storePickupProductTitle']
if store['partsAvailability'][model]['pickupDisplay'] \
== "available":
if sname not in good_stores:
good_stores.append(sname)
msg = "Gooo!! {store} has {item}!! {buy}{model}".format(
store=sname, item=item, buy=BUY, model=model)
print "{0} {1}".format(time.strftime("%m/%d/%Y %H:%M:%S"),
msg)
my_alert.send(msg)
else:
if sname in good_stores:
good_stores.remove(sname)
msg = "Oops all {item} in {store} are gone :( ".format(
item=item, store=sname)
print "{0} {1}".format(time.strftime("%m/%d/%Y %H:%M:%S"),
msg)
my_alert.send(msg)
time.sleep(int(sec))
class Alert(object):
def __init__(self, dest, login=None, password=None):
self.dest = dest
if login and password:
self.login = login
self.password = password
self.send = self.send_email
else:
self.send = self.send_sms
def send_email(self, msgbody):
message = "From: {0}\nTo: {1}\nSubject: {2}\n\n{3}".format(
self.login, self.dest, "Apple Stock Alert", msgbody)
mailer = smtplib.SMTP('smtp.gmail.com:587')
mailer.ehlo()
mailer.starttls()
mailer.ehlo()
mailer.login(self.login, self.password)
mailer.sendmail(self.login, self.dest, message)
mailer.close()
def send_sms(self, message):
r = requests.post('http://textbelt.com/text', data={
'number': self.dest, 'message': message})
if __name__ == '__main__':
check_stock(*sys.argv[1:])
| 37.876543 | 82 | 0.519557 | 355 | 3,068 | 4.4 | 0.369014 | 0.044814 | 0.033291 | 0.035851 | 0.116517 | 0.088348 | 0.088348 | 0.088348 | 0.076825 | 0.06146 | 0 | 0.009643 | 0.32399 | 3,068 | 80 | 83 | 38.35 | 0.743491 | 0.006519 | 0 | 0.140845 | 0 | 0.014085 | 0.206433 | 0.018707 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.056338 | 0.056338 | null | null | 0.084507 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
fabe2a3467357b65bb287beecc30e115d496c672 | 1,538 | py | Python | MOOSE_time/MOOSE_time/items.py | MOOSEOSS/MOOSE | 5b9a93c83b390f01cb9c49761510b3a5bf3c94a0 | [
"Apache-2.0"
] | 1 | 2021-09-02T09:32:41.000Z | 2021-09-02T09:32:41.000Z | MOOSE_time/MOOSE_time/items.py | MOOSEOSS/MOOSE | 5b9a93c83b390f01cb9c49761510b3a5bf3c94a0 | [
"Apache-2.0"
] | null | null | null | MOOSE_time/MOOSE_time/items.py | MOOSEOSS/MOOSE | 5b9a93c83b390f01cb9c49761510b3a5bf3c94a0 | [
"Apache-2.0"
] | 3 | 2021-08-30T08:34:51.000Z | 2021-09-30T13:14:31.000Z | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class MooseTimeItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
id = scrapy.Field()
event_id = scrapy.Field()
oss_id = scrapy.Field()
event_time = scrapy.Field()
issue_id = scrapy.Field()
issue_time = scrapy.Field()
pullrequest_id = scrapy.Field()
pullrequest_time = scrapy.Field()
commit_id = scrapy.Field()
commit_time = scrapy.Field()
class MOOSEUser(scrapy.Item):
user_id = scrapy.Field() #用户id
user_name = scrapy.Field() #用户登陆姓名
user_fullname = scrapy.Field() #用户姓名全程
avatar_url = scrapy.Field() #头像地址
follows_count = scrapy.Field() #被关注数
repos_count = scrapy.Field() #项目数
blog_url = scrapy.Field() #bolg地址
email_url = scrapy.Field() #emall地址
belong_org = scrapy.Field() #所属组织
org_member_count = scrapy.Field() # 组织会员数
user_type = scrapy.Field() #类别 0 user 1 org 存字符
user_create_time = scrapy.Field()
user_update_time = scrapy.Field()
update_time = scrapy.Field()
location = scrapy.Field()
company = scrapy.Field()
class MOOSEUserRepo(scrapy.Item):
user_id = scrapy.Field() #用户id
oss_id = scrapy.Field() #仓库id
user_type = scrapy.Field() #用户type
| 31.387755 | 68 | 0.605982 | 183 | 1,538 | 4.934426 | 0.387978 | 0.365449 | 0.129568 | 0.039867 | 0.06866 | 0.06866 | 0.06866 | 0 | 0 | 0 | 0 | 0.002715 | 0.281534 | 1,538 | 48 | 69 | 32.041667 | 0.81448 | 0.187256 | 0 | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.030303 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
fac2d25965d01df2e14260124356307de25ceb4b | 966 | py | Python | src/nwb_conversion_tools/datainterfaces/ecephys/kilosort/kilosortdatainterface.py | NeurodataWithoutBorders/nwbn-conversion-tools | 11aa4fa40af5deac28dd3643eb85425000bea780 | [
"BSD-3-Clause"
] | null | null | null | src/nwb_conversion_tools/datainterfaces/ecephys/kilosort/kilosortdatainterface.py | NeurodataWithoutBorders/nwbn-conversion-tools | 11aa4fa40af5deac28dd3643eb85425000bea780 | [
"BSD-3-Clause"
] | 6 | 2019-09-09T18:23:48.000Z | 2020-01-07T17:19:23.000Z | src/nwb_conversion_tools/datainterfaces/ecephys/kilosort/kilosortdatainterface.py | ben-dichter-consulting/nwb-conversion-tools | f5641317d2697a3916eeb54f74ce171ed65469ed | [
"BSD-3-Clause"
] | 1 | 2020-03-07T17:17:01.000Z | 2020-03-07T17:17:01.000Z | """Authors: Heberto Mayorquin, Cody Baker."""
from typing import Optional
from spikeinterface.extractors import KiloSortSortingExtractor
from ..basesortingextractorinterface import BaseSortingExtractorInterface
from ....utils import FolderPathType
class KilosortSortingInterface(BaseSortingExtractorInterface):
"""Primary data interface class for converting a KiloSortingExtractor from spikeinterface."""
SX = KiloSortSortingExtractor
def __init__(self, folder_path: FolderPathType, keep_good_only: bool = False, verbose: bool = True):
"""
Load and prepare sorting data for kilosort
Parameters
----------
folder_path: str or Path
Path to the output Phy folder (containing the params.py)
keep_good_only: bool
If True, only Kilosort-labeled 'good' units are returned
"""
super().__init__(folder_path=folder_path, keep_good_only=keep_good_only, verbose=verbose)
| 35.777778 | 104 | 0.725673 | 101 | 966 | 6.742574 | 0.574257 | 0.058737 | 0.070485 | 0.04699 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.198758 | 966 | 26 | 105 | 37.153846 | 0.879845 | 0.373706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.5 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
fac8370039b6f9d63aaf1f90e08f385dc5e471a9 | 503 | py | Python | docs/help.py | Lemaf/spo | 623244897040de73a2ac7db764d0d07609c149e3 | [
"MIT"
] | null | null | null | docs/help.py | Lemaf/spo | 623244897040de73a2ac7db764d0d07609c149e3 | [
"MIT"
] | null | null | null | docs/help.py | Lemaf/spo | 623244897040de73a2ac7db764d0d07609c149e3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
parser = argparse.ArgumentParser(description="Script standardization of file and folder,paths. The script checks the current date and organizes backups of folders and releases according to the date prior to the current day (without compromising the work), and generating log in /var/log/orgfolders/orgfolders.log.")
parser.add_argument("integers", metavar="-v", type=int, nargs="+" ,
help="Verbose mode")
args = parser.parse_args()
print args.accumulate(args.integers)
| 45.727273 | 315 | 0.767396 | 71 | 503 | 5.408451 | 0.704225 | 0.052083 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129225 | 503 | 10 | 316 | 50.3 | 0.876712 | 0.039761 | 0 | 0 | 0 | 0.2 | 0.602911 | 0.072765 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.2 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
fac962da564cca951cf20fed6f4b63a792b68ede | 2,853 | py | Python | make_rsa_kay.py | m-takeuchi/ilislife | f4d1a68ddd81e5318aa448393ee7f4b9278e8fa7 | [
"MIT"
] | null | null | null | make_rsa_kay.py | m-takeuchi/ilislife | f4d1a68ddd81e5318aa448393ee7f4b9278e8fa7 | [
"MIT"
] | null | null | null | make_rsa_kay.py | m-takeuchi/ilislife | f4d1a68ddd81e5318aa448393ee7f4b9278e8fa7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# coding=utf-8
from Crypto.PublicKey import RSA
from Crypto import Random
import os,sys
random_func = Random.new().read
rsa = RSA.generate(2048, random_func)
def get_id_rsa(id_rsa_file, passphrase=None):
with open(id_rsa_file, 'rb') as f:
id_rsa = RSA.importKey(f.read())
return id_rsa
def get_id_rsa_pub(id_rsa_pub_file, passphrase=None):
with open(id_rsa_pub_file, 'rb') as f:
id_rsa_pub = RSA.importKey(f.read())
# id_rsa_pub = RSA.pudkey(f.read())
return id_rsa_pub
def make_id_rsa(id_rsa_file, passphrase=None):
# private_key = rsa.exportKey(format='PEM', passphrase='hogehoge')
private_key = rsa.exportKey(format='PEM', passphrase=passphrase)
with open(id_rsa_file, 'wb') as f:
f.write(private_key)
def make_id_rsa_pub(id_rsa_pub_file, id_rsa_file, pasphrase=None):
id_rsa = get_id_rsa(id_rsa_file)
# print(id_rsa)
public_pem = id_rsa.publickey().exportKey()
# print(public_pem)
with open(id_rsa_pub_file, 'wb') as f:
f.write(public_pem)
def encrypt_file(id_rsa_pub_file, plain_text_file, encrypted_text_file):
# # 公開鍵による暗号化
id_rsa_pub = get_id_rsa_pub(id_rsa_pub_file)
print(id_rsa_pub)
with open(plain_text_file, 'r') as f:
plain_text = f.read()
with open(encrypted_text_file, 'wb') as f2:
f2.write(id_rsa_pub.encrypt(plain_text, random_func)[0])
def encrypt_str(id_rsa_pub_file, string, encrypted_text_file):
# # 公開鍵による暗号化
id_rsa_pub = get_id_rsa_pub(id_rsa_pub_file)
print(id_rsa_pub)
with open(encrypted_text_file, 'wb') as f:
f.write(id_rsa_pub.encrypt(string, random_func)[0])
#
# # 秘密鍵による復号化
# with open('cipher.txt', 'r') as f:
# with open('plain_decoded.txt', 'w') as f2:
# f2.write(RSA.importKey(private_pem, 'hogehoge').decrypt(f.read()))
#
# # 秘密鍵による電子署名の作成
# with open('file.txt', 'r') as f:
# with open('signature.bin', 'w') as f2:
# f2.write(str(RSA.importKey(private_pem, 'hogehoge').sign(f.read(), random_func)[0]))
#
# # 公開鍵による電子署名の検証
# with open('signature.bin', 'r') as f:
# with open('file.txt', 'r') as f2:
# rsa.verify(f2.read(), (long(f.read()),))
if __name__ == '__main__':
HOME = os.path.expanduser('~/')
SSH = os.path.expanduser(HOME+'.ssh/')
RSA_FILES = {'PRV': SSH+'id_rsa', 'PUB':SSH+'id_rsa.pub'}
if not os.path.isdir(SSH):
print('No such '+SSH+ 'directory. '+'Making ' + SSH)
os.mkdir(SSH)
if not os.path.isfile(RSA_FILES['PRV']):
make_id_rsa(RSA_FILES['PRV'], 600)
if not os.path.isfile(RSA_FILES['PUB']):
make_id_rsa_pub(RSA_FILES['PUB'], RSA_FILES['PRV'])
input_gmail_password = input('Please input gmail password>>> ')
# print(input_gmail_password)
encrypt_str(RSA_FILES['PUB'], bytes(input_gmail_password, 'utf-8'), 'pass.rsa')
| 33.564706 | 94 | 0.670172 | 458 | 2,853 | 3.877729 | 0.203057 | 0.109797 | 0.108108 | 0.054054 | 0.455518 | 0.360923 | 0.268581 | 0.100788 | 0.087838 | 0.087838 | 0 | 0.008921 | 0.174904 | 2,853 | 84 | 95 | 33.964286 | 0.74554 | 0.247809 | 0 | 0.085106 | 1 | 0 | 0.067076 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12766 | false | 0.12766 | 0.106383 | 0 | 0.276596 | 0.06383 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
fad4a1f1f9acb4fda7c7f38a0611d33858273f7f | 333 | py | Python | tests/common/test_version.py | jungtaekkim/bayeso | d11c9ff8037cf7fd3f9b41362eaab120f1224c71 | [
"MIT"
] | 76 | 2018-01-18T03:03:14.000Z | 2022-02-07T06:41:41.000Z | tests/common/test_version.py | POSTECH-CVLab/bayeso | d11c9ff8037cf7fd3f9b41362eaab120f1224c71 | [
"MIT"
] | 20 | 2018-06-29T16:48:03.000Z | 2021-04-19T00:30:57.000Z | tests/common/test_version.py | POSTECH-CVLab/bayeso | d11c9ff8037cf7fd3f9b41362eaab120f1224c71 | [
"MIT"
] | 4 | 2020-01-07T06:24:17.000Z | 2021-06-11T06:21:42.000Z | #
# author: Jungtaek Kim (jtkim@postech.ac.kr)
# last updated: June 24, 2021
#
"""test_import"""
STR_VERSION = '0.5.1'
def test_version_bayeso():
import bayeso
assert bayeso.__version__ == STR_VERSION
def test_version_setup():
import pkg_resources
assert pkg_resources.require("bayeso")[0].version == STR_VERSION
| 19.588235 | 68 | 0.714715 | 46 | 333 | 4.869565 | 0.565217 | 0.133929 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035714 | 0.159159 | 333 | 16 | 69 | 20.8125 | 0.764286 | 0.249249 | 0 | 0 | 0 | 0 | 0.045643 | 0 | 0 | 0 | 0 | 0 | 0.285714 | 1 | 0.285714 | false | 0 | 0.285714 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
fad6540e7314e7867ec374d5788dbee622590620 | 547 | py | Python | pokemon/schema.py | lflimeira/django-api-pokemon | e44bc6d586f9040de95a0317061080ec8c11a430 | [
"MIT"
] | null | null | null | pokemon/schema.py | lflimeira/django-api-pokemon | e44bc6d586f9040de95a0317061080ec8c11a430 | [
"MIT"
] | null | null | null | pokemon/schema.py | lflimeira/django-api-pokemon | e44bc6d586f9040de95a0317061080ec8c11a430 | [
"MIT"
] | null | null | null | import graphene
import graphql_jwt
import trainers.schema
import users.schema
import pokemons.schema
class Query(
users.schema.Query,
trainers.schema.Query,
pokemons.schema.Query,
graphene.ObjectType
):
pass
class Mutation(users.schema.Mutation, trainers.schema.Mutation, graphene.ObjectType):
token_auth = graphql_jwt.ObtainJSONWebToken.Field()
verify_token = graphql_jwt.Verify.Field()
refresh_token = graphql_jwt.Refresh.Field()
schema = graphene.Schema(query=Query, mutation=Mutation) | 26.047619 | 85 | 0.74223 | 63 | 547 | 6.333333 | 0.301587 | 0.100251 | 0.075188 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.170018 | 547 | 21 | 86 | 26.047619 | 0.878855 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.058824 | 0.294118 | 0 | 0.588235 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
fad7c55e478a60e874c4d2641c08eb90fb15d1a6 | 491 | py | Python | sdapis/migrations/0012_post_comment_list.py | mmtahir-dev/cmput404-socialdistribution | a5a4749c8d9c27ccd062e33be5a4fb2f76697394 | [
"MIT"
] | null | null | null | sdapis/migrations/0012_post_comment_list.py | mmtahir-dev/cmput404-socialdistribution | a5a4749c8d9c27ccd062e33be5a4fb2f76697394 | [
"MIT"
] | 49 | 2021-09-28T21:52:40.000Z | 2021-10-03T22:35:49.000Z | sdapis/migrations/0012_post_comment_list.py | mmtahir-dev/cmput404-socialdistribution | a5a4749c8d9c27ccd062e33be5a4fb2f76697394 | [
"MIT"
] | 1 | 2021-09-27T03:20:36.000Z | 2021-09-27T03:20:36.000Z | # Generated by Django 3.2.8 on 2021-10-22 04:33
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sdapis', '0011_auto_20211021_2133'),
]
operations = [
migrations.AddField(
model_name='post',
name='comment_list',
field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(), default=list, size=None),
),
]
| 24.55 | 116 | 0.649695 | 56 | 491 | 5.589286 | 0.75 | 0.083067 | 0.134185 | 0.172524 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.082447 | 0.234216 | 491 | 19 | 117 | 25.842105 | 0.75 | 0.09165 | 0 | 0 | 1 | 0 | 0.101351 | 0.051802 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.153846 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
fae24f18116863a912f140c392a70ab8e6b47946 | 707 | py | Python | openbook_common/translation.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 164 | 2019-07-29T17:59:06.000Z | 2022-03-19T21:36:01.000Z | openbook_common/translation.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 188 | 2019-03-16T09:53:25.000Z | 2019-07-25T14:57:24.000Z | openbook_common/translation.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 80 | 2019-08-03T17:49:08.000Z | 2022-02-28T16:56:33.000Z | from modeltranslation.translator import translator, TranslationOptions
from openbook_common.models import Emoji, EmojiGroup, Badge, Language
class EmojiGroupTranslationOptions(TranslationOptions):
fields = ('keyword',)
translator.register(EmojiGroup, EmojiGroupTranslationOptions)
class EmojiTranslationOptions(TranslationOptions):
fields = ('keyword',)
translator.register(Emoji, EmojiTranslationOptions)
class BadgeTranslationOptions(TranslationOptions):
fields = ('keyword_description',)
translator.register(Badge, BadgeTranslationOptions)
class LanguageTranslationOptions(TranslationOptions):
fields = ('name',)
translator.register(Language, LanguageTranslationOptions)
| 22.09375 | 70 | 0.81471 | 52 | 707 | 11.038462 | 0.423077 | 0.167247 | 0.162021 | 0.142857 | 0.170732 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.103253 | 707 | 31 | 71 | 22.806452 | 0.905363 | 0 | 0 | 0.142857 | 0 | 0 | 0.052334 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.714286 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
fae384754e4e4a9081ba9327b56797e7d30bdb0c | 703 | py | Python | tests/test_A000142.py | eliott02022000/oeis | 5a2245660fd91012f1bddad99698bdc4a80cebe0 | [
"MIT"
] | null | null | null | tests/test_A000142.py | eliott02022000/oeis | 5a2245660fd91012f1bddad99698bdc4a80cebe0 | [
"MIT"
] | null | null | null | tests/test_A000142.py | eliott02022000/oeis | 5a2245660fd91012f1bddad99698bdc4a80cebe0 | [
"MIT"
] | null | null | null | from oeis import A000142
from hypothesis import given
from hypothesis.strategies import integers
def test_sequence():
assert A000142() == [
1,
1,
2,
6,
24,
120,
720,
5040,
40320,
362880,
3628800,
39916800,
479001600,
6227020800,
87178291200,
1307674368000,
20922789888000,
355687428096000,
6402373705728000,
121645100408832000,
]
@given(integers(min_value=0, max_value=20), integers(min_value=1, max_value=20))
def test_sequence_length(start, limit):
assert len(A000142(start, limit)) == limit
| 20.676471 | 81 | 0.55192 | 65 | 703 | 5.861538 | 0.615385 | 0.073491 | 0.07874 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.388262 | 0.369844 | 703 | 33 | 82 | 21.30303 | 0.471783 | 0 | 0 | 0.068966 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.068966 | 1 | 0.068966 | false | 0 | 0.103448 | 0 | 0.172414 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
faef25753a3638f3222e40447e0c9e032e031933 | 1,202 | py | Python | magnum/conductor/utils.py | mjbrewer/testIndex | 420dc071d4240a89b6f266e8d2575cedb39bfea0 | [
"Apache-2.0"
] | null | null | null | magnum/conductor/utils.py | mjbrewer/testIndex | 420dc071d4240a89b6f266e8d2575cedb39bfea0 | [
"Apache-2.0"
] | null | null | null | magnum/conductor/utils.py | mjbrewer/testIndex | 420dc071d4240a89b6f266e8d2575cedb39bfea0 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Huawei Technologies Co.,LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from magnum.common import clients
from magnum import objects
def retrieve_bay(context, obj):
return objects.Bay.get_by_uuid(context, obj.bay_uuid)
def retrieve_baymodel(context, bay):
return objects.BayModel.get_by_uuid(context, bay.baymodel_id)
def object_has_stack(context, obj):
osc = clients.OpenStackClients(context)
if hasattr(obj, 'bay_uuid'):
obj = retrieve_bay(context, obj)
stack = osc.heat().stacks.get(obj.stack_id)
if (stack.stack_status == 'DELETE_COMPLETE' or
stack.stack_status == 'DELETE_IN_PROGRESS'):
return False
return True
| 31.631579 | 74 | 0.738769 | 176 | 1,202 | 4.943182 | 0.545455 | 0.068966 | 0.029885 | 0.036782 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008089 | 0.177205 | 1,202 | 37 | 75 | 32.486486 | 0.871587 | 0.47005 | 0 | 0 | 0 | 0 | 0.065811 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.133333 | 0.133333 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
faf512f0d9ba2b64ebabc9264d17d0e961c6a683 | 175 | py | Python | back-end/app/api/demo.py | JAYqq/MonGo | e33c9f62c2cf494af2b2d33408853294f3aed168 | [
"MIT"
] | 1 | 2019-03-26T04:44:59.000Z | 2019-03-26T04:44:59.000Z | back-end/app/api/demo.py | JAYqq/MonGo | e33c9f62c2cf494af2b2d33408853294f3aed168 | [
"MIT"
] | 5 | 2020-02-12T13:32:08.000Z | 2021-06-02T00:27:16.000Z | back-end/app/api/demo.py | JAYqq/MonGo | e33c9f62c2cf494af2b2d33408853294f3aed168 | [
"MIT"
] | null | null | null | from app import db
from app.models import User
def test():
users=User.query.all()
for user in users:
print(user.username)
if __name__ == "__main__":
test() | 21.875 | 28 | 0.657143 | 26 | 175 | 4.115385 | 0.692308 | 0.130841 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.228571 | 175 | 8 | 29 | 21.875 | 0.792593 | 0 | 0 | 0 | 0 | 0 | 0.045455 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.25 | 0 | 0.375 | 0.125 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
faf87e9087b7a57fd0c5667514bd6e244bef31cf | 412 | py | Python | kolibri_exercise_perseus_plugin/kolibri_plugin.py | nucleogenesis/kolibri-exercise-perseus-plugin | 7752d0bbe7b1a1f86e08c8d04058b4d68e6ee8fb | [
"MIT"
] | null | null | null | kolibri_exercise_perseus_plugin/kolibri_plugin.py | nucleogenesis/kolibri-exercise-perseus-plugin | 7752d0bbe7b1a1f86e08c8d04058b4d68e6ee8fb | [
"MIT"
] | null | null | null | kolibri_exercise_perseus_plugin/kolibri_plugin.py | nucleogenesis/kolibri-exercise-perseus-plugin | 7752d0bbe7b1a1f86e08c8d04058b4d68e6ee8fb | [
"MIT"
] | null | null | null | from __future__ import absolute_import, print_function, unicode_literals
from kolibri.core.content import hooks as content_hooks
from kolibri.plugins.base import KolibriPluginBase
class ExercisePerseusRenderPlugin(KolibriPluginBase):
pass
class ExercisePerseusRenderAsset(content_hooks.ContentRendererHook):
bundle_id = "exercise_perseus_render_module"
content_types_file = "content_types.json"
| 29.428571 | 72 | 0.84466 | 45 | 412 | 7.377778 | 0.666667 | 0.066265 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.106796 | 412 | 13 | 73 | 31.692308 | 0.902174 | 0 | 0 | 0 | 0 | 0 | 0.116505 | 0.072816 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.125 | 0.375 | 0 | 0.875 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 |
faff19a15adeba3d8740b7542e77a85707e6896c | 405 | py | Python | zvt/domain/quotes/coin_tick_kdata.py | fakegit/zvt | d7d7d592ba870944e9cb967a8d1e46d12c24d02a | [
"MIT"
] | null | null | null | zvt/domain/quotes/coin_tick_kdata.py | fakegit/zvt | d7d7d592ba870944e9cb967a8d1e46d12c24d02a | [
"MIT"
] | null | null | null | zvt/domain/quotes/coin_tick_kdata.py | fakegit/zvt | d7d7d592ba870944e9cb967a8d1e46d12c24d02a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from sqlalchemy.ext.declarative import declarative_base
# 数字货币tick
from zvdata.contract import register_schema
from zvt.domain.quotes import TickCommon
CoinTickKdataBase = declarative_base()
class CoinTickKdata(CoinTickKdataBase, TickCommon):
__tablename__ = 'coin_tick_kdata'
register_schema(providers=['ccxt'], db_name='coin_tick_kdata', schema_base=CoinTickKdataBase)
| 25.3125 | 93 | 0.804938 | 46 | 405 | 6.782609 | 0.630435 | 0.096154 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002747 | 0.101235 | 405 | 15 | 94 | 27 | 0.854396 | 0.074074 | 0 | 0 | 0 | 0 | 0.091398 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.428571 | 0 | 0.714286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
4f0730ced8f79fb3ba0a1c5079ba78d153220ed5 | 601 | py | Python | api/migrations/0024_auto_20180203_2206.py | PPinto22/watergenius-backend | 5ee795485b2b4b4c2b334461c7ddb0ef2496096a | [
"Apache-2.0"
] | null | null | null | api/migrations/0024_auto_20180203_2206.py | PPinto22/watergenius-backend | 5ee795485b2b4b4c2b334461c7ddb0ef2496096a | [
"Apache-2.0"
] | 2 | 2021-03-19T23:01:20.000Z | 2021-06-10T22:30:57.000Z | api/migrations/0024_auto_20180203_2206.py | PPinto22/watergenius-backend | 5ee795485b2b4b4c2b334461c7ddb0ef2496096a | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.0 on 2018-02-03 22:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0023_auto_20180131_1821'),
]
operations = [
migrations.AlterField(
model_name='dayplan',
name='dayplan_water_qty_unit',
field=models.CharField(default='L', max_length=6),
),
migrations.AlterField(
model_name='irrigationtime',
name='irrigation_time_qty_unit',
field=models.CharField(default='L', max_length=10),
),
]
| 25.041667 | 63 | 0.602329 | 64 | 601 | 5.453125 | 0.671875 | 0.114613 | 0.143266 | 0.166189 | 0.252149 | 0.252149 | 0.252149 | 0.252149 | 0.252149 | 0 | 0 | 0.076566 | 0.282862 | 601 | 23 | 64 | 26.130435 | 0.733179 | 0.071547 | 0 | 0.235294 | 1 | 0 | 0.170863 | 0.124101 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.058824 | 0 | 0.235294 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
4f0b4091f37b77d4872ba3f56eaa0f40323abb93 | 489 | py | Python | NST/models/network.py | VITA-Group/Sandwich-Batch-Normalization | 25e7df6e64a67cebd7e70b911f874cfc1bd19df0 | [
"MIT"
] | 46 | 2021-02-20T18:49:46.000Z | 2022-03-24T08:46:20.000Z | NST/models/network.py | VITA-Group/Sandwich-Batch-Normalization | 25e7df6e64a67cebd7e70b911f874cfc1bd19df0 | [
"MIT"
] | null | null | null | NST/models/network.py | VITA-Group/Sandwich-Batch-Normalization | 25e7df6e64a67cebd7e70b911f874cfc1bd19df0 | [
"MIT"
] | 3 | 2021-02-23T07:28:12.000Z | 2021-02-26T01:09:56.000Z | # -*- coding: utf-8 -*-
# @Date : 2/17/21
# @Author : Xinyu Gong (xinyu.gong@utexas.edu)
# @Link : None
# @Version : 0.0
from .modules import AdaIN, SaAdaIN, BaseStyleNet
class AdaINNet(BaseStyleNet):
    """Style-transfer network that uses vanilla AdaIN for style normalization."""

    def __init__(self, encoder, decoder):
        super().__init__(encoder, decoder)
        # The base class wires up encoder/decoder; only the normalization
        # module differs between the network variants in this file.
        self.style_norm = AdaIN()
class SaAdaINNet(BaseStyleNet):
    """Style-transfer network that uses Sandwich AdaIN for style normalization."""

    def __init__(self, encoder, decoder):
        super().__init__(encoder, decoder)
        # NOTE(review): 512 presumably matches the encoder's feature-channel
        # count — confirm against the encoder definition.
        self.style_norm = SaAdaIN(512)
| 23.285714 | 49 | 0.648262 | 58 | 489 | 5.155172 | 0.586207 | 0.187291 | 0.12709 | 0.153846 | 0.488294 | 0.488294 | 0.488294 | 0.488294 | 0.488294 | 0.488294 | 0 | 0.028571 | 0.212679 | 489 | 20 | 50 | 24.45 | 0.748052 | 0.239264 | 0 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
8782113e8e7e4bd459626f9f51560cb481062379 | 133 | py | Python | WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/mini-scripts/python_Change_a_Range_of_Item_Values_2.txt.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | 5 | 2021-06-02T23:44:25.000Z | 2021-12-27T16:21:57.000Z | WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/mini-scripts/python_Change_a_Range_of_Item_Values_2.txt.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | 22 | 2021-05-31T01:33:25.000Z | 2021-10-18T18:32:39.000Z | WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/mini-scripts/python_Change_a_Range_of_Item_Values_2.txt.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | [
"MIT"
] | 3 | 2021-06-19T03:37:47.000Z | 2021-08-31T00:49:51.000Z | thislist = ["apple", "banana", "cherry", "orange", "kiwi", "mango"]
thislist[1:3] = ["blackcurrant", "watermelon"]
print(thislist)
| 22.166667 | 67 | 0.639098 | 14 | 133 | 6.071429 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016949 | 0.112782 | 133 | 5 | 68 | 26.6 | 0.70339 | 0 | 0 | 0 | 0 | 0 | 0.406015 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8782792c125aa343038e2d1b0394a83fcb8ef02f | 2,779 | py | Python | extractseqs.py | prakashraaz/bootcamp_test | c664b4b6ff304009924c5680cc6f0c84c75833d3 | [
"MIT"
] | null | null | null | extractseqs.py | prakashraaz/bootcamp_test | c664b4b6ff304009924c5680cc6f0c84c75833d3 | [
"MIT"
] | null | null | null | extractseqs.py | prakashraaz/bootcamp_test | c664b4b6ff304009924c5680cc6f0c84c75833d3 | [
"MIT"
] | null | null | null | import re, os
qsub_command = "qsub"
qsub_command2 = "echo"
list1 = ["Acorus", "Burmannia", "Chamaedorea", "Costus", "Cypripedium", "Dasypogon",
"Gymnosiphon", "Hanguana", "Japos", "Joinvillea", "Lacodonia", "Lilium",
"Neoregelia", "Pandanus", "Saurattum", "Tacca", "Talbotia", "Triuris", "Typha"]
list2 = ["Aletris", "Anemarrhena", "Anomochloa", "Aphelia", "Aphyllanthes", "Aponogeton",
"Apostasia", "Astelia", "Baxteria", "Behnia", "Calectasia", "Campynemante",
"Centromono", "Centrostri", "Chlorophytum", "Croomia", "Cyperus", "Dendrobium",
"Doryanthes", "Ecdeiocolea", "Elegia", "Flagellaria", "Hemiphylacus", "Hosta",
"Ixiolirion", "Juncus", "Kingia", "Lachocaulan", "Lanaria", "Leochilus",
"Mayaca", "Mexipedium", "Molinaria", "Paphiopedilum", "Parasitaxus", "Paris",
"Phragmipedium", "Sagittaria", "Saurattum", "Stegolepis", "Streptochaeta",
"Tradescantia", "Trithuria", "Triurfem", "Triuris", "Triurmal", "Uvularia", "Xyris"]
list3 = list1 + list2
for name in list3:
script = """
#PBS -l nodes=1:ppn=1
#PBS -l walltime=01:00:00
#PBS -l pmem=4gb
#PBS -m bea
cd /gpfs/home/prt119/biostar/projects/monatol/Single_copy_analysis/work_2016_Feb/{0}_orthos
cat SC_orthos.ids | xargs -I {} grep -w {} {0}_genes_and_orthos_sorted | sort -k2 | uniq > Orthos_{0}.ids
mkdir Orthos_{0}
perl -e' %seq; $id; open(IN, "{0}_gtcleaned.fasta.cds"); while(<IN>){chomp; if(/^>(\S+)/){$id=$1;} else{s/\s+//g; $seq{$id}{fna} .= $_;} } close IN; open(IN, "{0}_gtcleaned.fasta.pep"); while(<IN>){chomp; if(/^>(\S+)/){$id=$1;} else{s/\s+//g; $seq{$id}{faa} .= $_;} } close IN; %orthos; while(<>){chomp; @F=split(/\t/, $_); $orthos{$F[1]}{$F[0]} = $F[0]; } foreach $ortho_id (keys %orthos){open(F1, ">Orthos_{0}/$ortho_id.fna"); open(F2, ">Orthos_{0}/$ortho_id.faa"); foreach $seq_id(keys %{$orthos{$ortho_id}}){print F1 ">$seq_id\n$seq{$seq_id}{fna}\n"; print F2 ">$seq_id\n$seq{$seq_id}{faa}\n"; } close F1; close F2;}' < {0}_genes_and_orthos_sorted
#mv Orthos_{0}.ids Orthos_{0}
cp /gpfs/home/prt119/biostar/projects/monatol/Single_copy_analysis/SC_orthos.ids /gpfs/home/prt119/biostar/projects/monatol/Single_copy_analysis/work_2016_Feb/{0}_orthos/Orthos_{0}
cd /gpfs/home/prt119/biostar/projects/monatol/Single_copy_analysis/work_2016_Feb/{0}_orthos/Orthos_{0}
mkdir {0}_single_copies
cat SC_orthos.ids | xargs -I {} cp {}.fna {0}_single_copies/
cat SC_orthos.ids | xargs -I {} cp {}.faa {0}_single_copies/
"""
names = re.sub('\{0\}', name, script)
names = re.sub('\{1\}', name[:4], names)
print names
filename = "extracting_" + name + ".pbs"
fh = open(filename, "w")
fh.write(names)
fh.write("\n")
fh.close()
os.system('%s %s' % (qsub_command, filename))
os.system('sleep 1')
| 52.433962 | 653 | 0.646276 | 378 | 2,779 | 4.589947 | 0.42328 | 0.032277 | 0.032277 | 0.048415 | 0.317003 | 0.268588 | 0.240922 | 0.240922 | 0.240922 | 0.209798 | 0 | 0.030604 | 0.129903 | 2,779 | 52 | 654 | 53.442308 | 0.686931 | 0 | 0 | 0 | 0 | 0.119048 | 0.742888 | 0.247749 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.02381 | null | null | 0.047619 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8790b7f51c2629f943347e6e93cffb45e19c83fa | 242 | py | Python | _notebooks/test_mark_examples.py | gunnhildsp/notes | 224c31e5c53b21fa33d481a6d841a33686d50872 | [
"Apache-2.0"
] | null | null | null | _notebooks/test_mark_examples.py | gunnhildsp/notes | 224c31e5c53b21fa33d481a6d841a33686d50872 | [
"Apache-2.0"
] | null | null | null | _notebooks/test_mark_examples.py | gunnhildsp/notes | 224c31e5c53b21fa33d481a6d841a33686d50872 | [
"Apache-2.0"
] | null | null | null | import pytest
@pytest.mark.this
def test_example():
    """Single-marked test: carries only the `this` mark."""
    greeting = "Hello"
    print(greeting)
    assert True
@pytest.mark.this
@pytest.mark.that
def test_several_marks():
    """Doubly-marked test: carries both the `this` and `that` marks."""
    message = "Nothing"
    print(message)
    assert True
def test_unmarked():
    """Unmarked test: runs even when a mark filter excludes the others."""
    greeting = "Hello"
    print(greeting)
    assert 1
| 14.235294 | 25 | 0.681818 | 33 | 242 | 4.878788 | 0.515152 | 0.186335 | 0.173913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005128 | 0.194215 | 242 | 16 | 26 | 15.125 | 0.820513 | 0 | 0 | 0.461538 | 0 | 0 | 0.070248 | 0 | 0 | 0 | 0 | 0 | 0.230769 | 1 | 0.230769 | true | 0 | 0.076923 | 0 | 0.307692 | 0.230769 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
879e9d82e7bb6f861d093e3d4973476b4aa9133e | 861 | py | Python | homeassistant/components/tractive/diagnostics.py | jlmaners/core | 9d016dd4346ec776da40f816764a5be441e34a3b | [
"Apache-2.0"
] | 30,023 | 2016-04-13T10:17:53.000Z | 2020-03-02T12:56:31.000Z | homeassistant/components/tractive/diagnostics.py | jlmaners/core | 9d016dd4346ec776da40f816764a5be441e34a3b | [
"Apache-2.0"
] | 24,710 | 2016-04-13T08:27:26.000Z | 2020-03-02T12:59:13.000Z | homeassistant/components/tractive/diagnostics.py | jlmaners/core | 9d016dd4346ec776da40f816764a5be441e34a3b | [
"Apache-2.0"
] | 11,956 | 2016-04-13T18:42:31.000Z | 2020-03-02T09:32:12.000Z | """Diagnostics support for Tractive."""
from __future__ import annotations
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from .const import DOMAIN, TRACKABLES
# Keys scrubbed from the diagnostics dump before it is shared
# (account credentials plus ID-like fields).
TO_REDACT = {CONF_PASSWORD, CONF_EMAIL, "title", "_id"}
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: ConfigEntry
) -> dict:
    """Return diagnostics for a config entry."""
    # Trackables for this entry were stored in hass.data at setup time.
    trackables = hass.data[DOMAIN][config_entry.entry_id][TRACKABLES]
    payload = {
        "config_entry": config_entry.as_dict(),
        "trackables": [item.trackable for item in trackables],
    }
    # Strip sensitive keys (credentials/identifiers) before returning.
    return async_redact_data(payload, TO_REDACT)
| 29.689655 | 69 | 0.743322 | 99 | 861 | 6.181818 | 0.373737 | 0.107843 | 0.04902 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.175377 | 861 | 28 | 70 | 30.75 | 0.861972 | 0.038328 | 0 | 0 | 0 | 0 | 0.03856 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.105263 | 0.315789 | 0 | 0.368421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 |
879fac0a0781f60eccce061166dce95c8fabae26 | 658 | py | Python | micro/ast_token.py | thewizardplusplus/micro | 3078dbcbd2ea17b0be3ab403fb03bb210b19f01c | [
"MIT"
] | 12 | 2017-08-06T10:35:59.000Z | 2018-11-30T13:13:59.000Z | micro/ast_token.py | thewizardplusplus/micro | 3078dbcbd2ea17b0be3ab403fb03bb210b19f01c | [
"MIT"
] | 194 | 2016-08-22T20:41:46.000Z | 2018-03-20T00:25:27.000Z | micro/ast_token.py | thewizardplusplus/micro | 3078dbcbd2ea17b0be3ab403fb03bb210b19f01c | [
"MIT"
] | 1 | 2021-05-19T22:39:44.000Z | 2021-05-19T22:39:44.000Z | import json
class AstToken:
    """JSON-serializable wrapper around a single lexer token."""

    def __init__(self, lex_token):
        token_name = lex_token.type
        self.name = token_name
        # Keep the value only when it differs from the token name;
        # tokens whose value equals their type carry no extra information,
        # and the attribute's absence keeps it out of the JSON dump.
        if lex_token.value != token_name:
            self.value = lex_token.value

    def __str__(self):
        return json.dumps(self, cls=AstTokenEncoder)
class AstTokenList:
    """Ordered collection of AstToken objects with a JSON string form."""

    def __init__(self, tokens):
        self.tokens = tokens

    def __str__(self):
        # Serialize the whole list; the custom encoder handles AstToken items.
        return json.dumps(self.tokens, cls=AstTokenEncoder)
class AstTokenEncoder(json.JSONEncoder):
    """json.JSONEncoder that serializes AstToken instances as attribute dicts."""

    def default(self, some_object):
        if isinstance(some_object, AstToken):
            # __dict__ holds name and, when present, value.
            return some_object.__dict__
        # Anything else falls through to the base implementation,
        # which raises TypeError for unserializable objects.
        return super(AstTokenEncoder, self).default(some_object)
| 26.32 | 68 | 0.674772 | 79 | 658 | 5.265823 | 0.35443 | 0.076923 | 0.052885 | 0.076923 | 0.139423 | 0.139423 | 0.139423 | 0 | 0 | 0 | 0 | 0 | 0.235562 | 658 | 24 | 69 | 27.416667 | 0.827038 | 0 | 0 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.277778 | false | 0 | 0.055556 | 0.111111 | 0.722222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
87a0bfbf78ba46fcd549dc2d04f0114ce4da1a94 | 1,417 | py | Python | tests/test/base/test_hold.py | Eve-ning/reamber_base_py | 6d19c84f2c110b60e633b82b73e0516396466f56 | [
"MIT"
] | 10 | 2020-06-28T11:16:36.000Z | 2021-08-09T21:41:43.000Z | tests/test/base/test_hold.py | Eve-ning/reamberPy | 6d19c84f2c110b60e633b82b73e0516396466f56 | [
"MIT"
] | 35 | 2020-06-18T13:05:50.000Z | 2022-02-18T10:13:35.000Z | tests/test/base/test_hold.py | Eve-ning/reamber_base_py | 6d19c84f2c110b60e633b82b73e0516396466f56 | [
"MIT"
] | 2 | 2021-05-26T17:05:06.000Z | 2021-06-12T18:42:13.000Z | import unittest
import pandas as pd
from reamber.base import Hold
class TestHold(unittest.TestCase):
    """Validates the Base architecture through the Hold note type."""

    def _make_hold(self, length=1000):
        # Factory for the reference hold: offset 1000 ms, column 1.
        return Hold(offset=1000, column=1, length=length)

    def setUp(self) -> None:
        self.hold = self._make_hold()

    # @profile
    def test_type(self):
        # The underlying storage must be a pandas Series.
        self.assertTrue(isinstance(self.hold.data, pd.Series))

    def test_eq(self):
        self.assertEqual(self._make_hold(), self.hold)
        self.assertNotEqual(self._make_hold(length=2000), self.hold)

    def test_length(self):
        self.assertEqual(1000, self.hold.length)

    def test_tail_offset(self):
        # tail_offset == offset + length.
        self.assertEqual(2000, self.hold.tail_offset)

    def test_deepcopy(self):
        # Distinct constructions and deepcopies are distinct objects ...
        self.assertFalse(self.hold is self._make_hold())
        self.assertFalse(self.hold is self.hold.deepcopy())
        # ... while plain assignment only aliases.
        alias = self.hold
        self.assertTrue(self.hold is alias)

    def test_length_op(self):
        self.assertEqual(1000, self.hold.length)
        self.hold.length *= 2
        self.assertEqual(2000, self.hold.length)
        self.assertEqual(3000, self.hold.tail_offset)
        # An odd occurrence, but negative lengths are deliberately supported.
        self.hold.length = -1000
        self.assertEqual(-1000, self.hold.length)
        self.assertEqual(0, self.hold.tail_offset)
if __name__ == '__main__':
    # Allow running the suite directly: `python test_hold.py`.
    unittest.main()
| 30.148936 | 80 | 0.668313 | 192 | 1,417 | 4.833333 | 0.296875 | 0.155172 | 0.090517 | 0.086207 | 0.404095 | 0.261853 | 0.232759 | 0.075431 | 0 | 0 | 0 | 0.059299 | 0.214538 | 1,417 | 46 | 81 | 30.804348 | 0.774483 | 0.087509 | 0 | 0.066667 | 0 | 0 | 0.006226 | 0 | 0 | 0 | 0 | 0 | 0.433333 | 1 | 0.233333 | false | 0 | 0.1 | 0 | 0.366667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
87a528a0e0759035d4c8aac3b8f8af17e1b78c8e | 2,679 | py | Python | apysc/_converter/cast.py | ynsnf/apysc | b10ffaf76ec6beb187477d0a744fca00e3efc3fb | [
"MIT"
] | 16 | 2021-04-16T02:01:29.000Z | 2022-01-01T08:53:49.000Z | apysc/_converter/cast.py | ynsnf/apysc | b10ffaf76ec6beb187477d0a744fca00e3efc3fb | [
"MIT"
] | 613 | 2021-03-24T03:37:38.000Z | 2022-03-26T10:58:37.000Z | apysc/_converter/cast.py | simon-ritchie/apyscript | c319f8ab2f1f5f7fad8d2a8b4fc06e7195476279 | [
"MIT"
] | 2 | 2021-06-20T07:32:58.000Z | 2021-12-26T08:22:11.000Z | """Implementation of common cast conversion.
This module mainly defines the following interfaces:
- to_int_from_float
Convert float value to int.
- to_float_from_int
Convert int value to float.
- to_bool_from_int
Convert int value to bool.
"""
from typing import Any
from typing import Union
from apysc._type.int import Int
from apysc._type.number_value_interface import NumberValueInterface
def to_int_from_float(
        *, int_or_float: Union[int, float, NumberValueInterface]) -> Any:
    """
    Convert float value to int.

    Parameters
    ----------
    int_or_float : int or float or Int or Number
        Target float value. If an integer is specified, the
        conversion is skipped.

    Returns
    -------
    int_val : int or Int
        Converted integer value.
    """
    import apysc as ap
    with ap.DebugInfo(
            callable_=to_int_from_float, locals_=locals(),
            module_name=__name__):
        if isinstance(int_or_float, ap.Number):
            converted = ap.Int(int_or_float)
        elif isinstance(int_or_float, float):
            converted = int(int_or_float)
        else:
            # Already an integer type: pass through unchanged.
            converted = int_or_float
        return converted
def to_float_from_int(
        *, int_or_float: Union[int, float, NumberValueInterface]) -> Any:
    """
    Convert int value to float.

    Parameters
    ----------
    int_or_float : int or float or Int or Number
        Target integer value. If a float is specified, the
        conversion is skipped.

    Returns
    -------
    float_val : float or Number
        Converted float value.
    """
    import apysc as ap
    with ap.DebugInfo(
            callable_=to_float_from_int, locals_=locals(),
            module_name=__name__):
        if isinstance(int_or_float, ap.Int):
            converted = ap.Number(int_or_float)
        elif isinstance(int_or_float, int):
            converted = float(int_or_float)
        else:
            # Already a float type: pass through unchanged.
            converted = int_or_float
        return converted
def to_bool_from_int(*, integer: Union[int, Int]) -> bool:
    """
    Convert int value to bool.

    Parameters
    ----------
    integer : int or Int
        Integer value to convert. Must be zero or one.

    Returns
    -------
    bool_val : bool
        Converted boolean value.

    Raises
    ------
    ValueError
        If the argument value isn't zero or one.
    """
    import apysc as ap
    with ap.DebugInfo(
            callable_=to_bool_from_int, locals_=locals(),
            module_name=__name__):
        from apysc._validation import number_validation
        number_validation.validate_int_is_zero_or_one(integer=integer)
        # Rely on the truthiness of (integer == 0) so that both the
        # builtin int and apysc's Int comparison results work here.
        return False if integer == 0 else True
| 26.009709 | 74 | 0.609183 | 330 | 2,679 | 4.669697 | 0.193939 | 0.064893 | 0.103829 | 0.044127 | 0.538611 | 0.437378 | 0.40623 | 0.32706 | 0.32706 | 0.186892 | 0 | 0.000541 | 0.309444 | 2,679 | 102 | 75 | 26.264706 | 0.832432 | 0.358716 | 0 | 0.361111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | true | 0 | 0.222222 | 0 | 0.527778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
87a7da6dc806a5d838b81a1dd164114dd8e3f7a4 | 259 | py | Python | tests/test_second_app/models.py | plumdog/django_migration_test | 8fb1cd9915bd028cc9fc4ad044070d8379d9c5b1 | [
"MIT"
] | 44 | 2015-08-28T15:47:38.000Z | 2020-10-08T16:00:07.000Z | tests/test_second_app/models.py | plumdog/django_migration_testcase | 8fb1cd9915bd028cc9fc4ad044070d8379d9c5b1 | [
"MIT"
] | 29 | 2015-08-15T22:17:43.000Z | 2020-02-26T16:41:05.000Z | tests/test_second_app/models.py | plumdog/django_migration_test | 8fb1cd9915bd028cc9fc4ad044070d8379d9c5b1 | [
"MIT"
] | 8 | 2015-08-28T16:12:51.000Z | 2019-08-21T22:35:18.000Z | from django.db import models
class MyModel(models.Model):
name = models.CharField(max_length=100)
number = models.IntegerField(null=True)
my_model = models.ForeignKey(
'test_app.MyModel', blank=True, null=True, on_delete=models.CASCADE)
| 28.777778 | 76 | 0.72973 | 35 | 259 | 5.285714 | 0.714286 | 0.086486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013761 | 0.158301 | 259 | 8 | 77 | 32.375 | 0.834862 | 0 | 0 | 0 | 0 | 0 | 0.061776 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
87ad6c0905641caff8854ecd2bef5e807b44197e | 256 | py | Python | output/models/nist_data/atomic/long/schema_instance/nistschema_sv_iv_atomic_long_min_inclusive_3_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/nist_data/atomic/long/schema_instance/nistschema_sv_iv_atomic_long_min_inclusive_3_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/nist_data/atomic/long/schema_instance/nistschema_sv_iv_atomic_long_min_inclusive_3_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from output.models.nist_data.atomic.long.schema_instance.nistschema_sv_iv_atomic_long_min_inclusive_3_xsd.nistschema_sv_iv_atomic_long_min_inclusive_3 import NistschemaSvIvAtomicLongMinInclusive3
__all__ = [
"NistschemaSvIvAtomicLongMinInclusive3",
]
| 42.666667 | 195 | 0.894531 | 30 | 256 | 6.933333 | 0.633333 | 0.144231 | 0.134615 | 0.192308 | 0.355769 | 0.355769 | 0.355769 | 0.355769 | 0 | 0 | 0 | 0.016529 | 0.054688 | 256 | 5 | 196 | 51.2 | 0.842975 | 0 | 0 | 0 | 0 | 0 | 0.144531 | 0.144531 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
87b8a750e8a4977228f0a80fd6305b9626462129 | 14,017 | py | Python | Scripts/generated/protocolbuffers/DebugVisualization_pb2.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/generated/protocolbuffers/DebugVisualization_pb2.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/generated/protocolbuffers/DebugVisualization_pb2.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: D:\dev\TS4\_deploy\Client\Releasex64\Python\Generated\protocolbuffers\DebugVisualization_pb2.py
# Compiled at: 2020-12-13 14:22:23
# Size of source mod 2**32: 14438 bytes
from google.protobuf import descriptor
from google.protobuf import message
from google.protobuf import reflection
from google.protobuf import descriptor_pb2
import protocolbuffers.Math_pb2 as Math_pb2
DESCRIPTOR = descriptor.FileDescriptor(name='DebugVisualization.proto',
package='EA.Sims4.Network',
serialized_pb='\n\x18DebugVisualization.proto\x12\x10EA.Sims4.Network\x1a\nMath.proto"³\x01\n\x10DebugViz_LineSet\x126\n\x05lines\x18\x01 \x03(\x0b2\'.EA.Sims4.Network.DebugViz_LineSet.Line\x1ag\n\x04Line\x12(\n\x05begin\x18\x01 \x02(\x0b2\x19.EA.Sims4.Network.Vector3\x12&\n\x03end\x18\x02 \x02(\x0b2\x19.EA.Sims4.Network.Vector3\x12\r\n\x05color\x18\x03 \x02(\r"ë\x03\n\rDebugViz_Text\x12.\n\x0bposition_2d\x18\x01 \x01(\x0b2\x19.EA.Sims4.Network.Vector2\x12.\n\x0bposition_3d\x18\x02 \x01(\x0b2\x19.EA.Sims4.Network.Vector3\x12\x10\n\x08color_fg\x18\x03 \x01(\r\x12\x10\n\x08color_bg\x18\x04 \x01(\r\x12\x0c\n\x04text\x18\x05 \x02(\t\x12H\n\talignment\x18\x06 \x01(\x0e2).EA.Sims4.Network.DebugViz_Text.Alignment:\nALIGN_LEFT\x12K\n\tvertalign\x18\x07 \x01(\x0e2).EA.Sims4.Network.DebugViz_Text.VertAlign:\rVERTALIGN_TOP\x12\x11\n\tobject_id\x18\x08 \x01(\x06\x12\x12\n\nbone_index\x18\t \x01(\x05">\n\tAlignment\x12\x0e\n\nALIGN_LEFT\x10\x00\x12\x10\n\x0cALIGN_CENTER\x10\x01\x12\x0f\n\x0bALIGN_RIGHT\x10\x02"J\n\tVertAlign\x12\x11\n\rVERTALIGN_TOP\x10\x00\x12\x14\n\x10VERTALIGN_MIDDLE\x10\x01\x12\x14\n\x10VERTALIGN_BOTTOM\x10\x02"\x88\x01\n\x0eDebugViz_Layer\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x12\n\nlines_data\x18\x02 \x01(\x0c\x12\x0e\n\x06zoneId\x18\x03 \x01(\x04\x121\n\x08text_set\x18\x04 \x03(\x0b2\x1f.EA.Sims4.Network.DebugViz_Text\x12\x11\n\ttris_data\x18\x05 \x01(\x0c"E\n DebugViz_LayerUpdateNotification\x12\x11\n\tlayerName\x18\x01 \x02(\t\x12\x0e\n\x06zoneId\x18\x02 \x01(\x04')
_DEBUGVIZ_TEXT_ALIGNMENT = descriptor.EnumDescriptor(name='Alignment',
full_name='EA.Sims4.Network.DebugViz_Text.Alignment',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(name='ALIGN_LEFT',
index=0,
number=0,
options=None,
type=None),
descriptor.EnumValueDescriptor(name='ALIGN_CENTER',
index=1,
number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(name='ALIGN_RIGHT',
index=2,
number=2,
options=None,
type=None)],
containing_type=None,
options=None,
serialized_start=594,
serialized_end=656)
_DEBUGVIZ_TEXT_VERTALIGN = descriptor.EnumDescriptor(name='VertAlign',
full_name='EA.Sims4.Network.DebugViz_Text.VertAlign',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(name='VERTALIGN_TOP',
index=0,
number=0,
options=None,
type=None),
descriptor.EnumValueDescriptor(name='VERTALIGN_MIDDLE',
index=1,
number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(name='VERTALIGN_BOTTOM',
index=2,
number=2,
options=None,
type=None)],
containing_type=None,
options=None,
serialized_start=658,
serialized_end=732)
_DEBUGVIZ_LINESET_LINE = descriptor.Descriptor(name='Line',
full_name='EA.Sims4.Network.DebugViz_LineSet.Line',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(name='begin',
full_name='EA.Sims4.Network.DebugViz_LineSet.Line.begin',
index=0,
number=1,
type=11,
cpp_type=10,
label=2,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='end',
full_name='EA.Sims4.Network.DebugViz_LineSet.Line.end',
index=1,
number=2,
type=11,
cpp_type=10,
label=2,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='color',
full_name='EA.Sims4.Network.DebugViz_LineSet.Line.color',
index=2,
number=3,
type=13,
cpp_type=3,
label=2,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None)],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=135,
serialized_end=238)
_DEBUGVIZ_LINESET = descriptor.Descriptor(name='DebugViz_LineSet',
full_name='EA.Sims4.Network.DebugViz_LineSet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(name='lines',
full_name='EA.Sims4.Network.DebugViz_LineSet.lines',
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None)],
extensions=[],
nested_types=[
_DEBUGVIZ_LINESET_LINE],
enum_types=[],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=59,
serialized_end=238)
_DEBUGVIZ_TEXT = descriptor.Descriptor(name='DebugViz_Text',
full_name='EA.Sims4.Network.DebugViz_Text',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(name='position_2d',
full_name='EA.Sims4.Network.DebugViz_Text.position_2d',
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='position_3d',
full_name='EA.Sims4.Network.DebugViz_Text.position_3d',
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='color_fg',
full_name='EA.Sims4.Network.DebugViz_Text.color_fg',
index=2,
number=3,
type=13,
cpp_type=3,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='color_bg',
full_name='EA.Sims4.Network.DebugViz_Text.color_bg',
index=3,
number=4,
type=13,
cpp_type=3,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='text',
full_name='EA.Sims4.Network.DebugViz_Text.text',
index=4,
number=5,
type=9,
cpp_type=9,
label=2,
has_default_value=False,
default_value=((b'').decode('utf-8')),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='alignment',
full_name='EA.Sims4.Network.DebugViz_Text.alignment',
index=5,
number=6,
type=14,
cpp_type=8,
label=1,
has_default_value=True,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='vertalign',
full_name='EA.Sims4.Network.DebugViz_Text.vertalign',
index=6,
number=7,
type=14,
cpp_type=8,
label=1,
has_default_value=True,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='object_id',
full_name='EA.Sims4.Network.DebugViz_Text.object_id',
index=7,
number=8,
type=6,
cpp_type=4,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='bone_index',
full_name='EA.Sims4.Network.DebugViz_Text.bone_index',
index=8,
number=9,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None)],
extensions=[],
nested_types=[],
enum_types=[
_DEBUGVIZ_TEXT_ALIGNMENT,
_DEBUGVIZ_TEXT_VERTALIGN],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=241,
serialized_end=732)
_DEBUGVIZ_LAYER = descriptor.Descriptor(name='DebugViz_Layer',
full_name='EA.Sims4.Network.DebugViz_Layer',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(name='name',
full_name='EA.Sims4.Network.DebugViz_Layer.name',
index=0,
number=1,
type=9,
cpp_type=9,
label=2,
has_default_value=False,
default_value=((b'').decode('utf-8')),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='lines_data',
full_name='EA.Sims4.Network.DebugViz_Layer.lines_data',
index=1,
number=2,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b'',
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='zoneId',
full_name='EA.Sims4.Network.DebugViz_Layer.zoneId',
index=2,
number=3,
type=4,
cpp_type=4,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='text_set',
full_name='EA.Sims4.Network.DebugViz_Layer.text_set',
index=3,
number=4,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='tris_data',
full_name='EA.Sims4.Network.DebugViz_Layer.tris_data',
index=4,
number=5,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b'',
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None)],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=735,
serialized_end=871)
_DEBUGVIZ_LAYERUPDATENOTIFICATION = descriptor.Descriptor(name='DebugViz_LayerUpdateNotification',
full_name='EA.Sims4.Network.DebugViz_LayerUpdateNotification',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(name='layerName',
full_name='EA.Sims4.Network.DebugViz_LayerUpdateNotification.layerName',
index=0,
number=1,
type=9,
cpp_type=9,
label=2,
has_default_value=False,
default_value=((b'').decode('utf-8')),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None),
descriptor.FieldDescriptor(name='zoneId',
full_name='EA.Sims4.Network.DebugViz_LayerUpdateNotification.zoneId',
index=1,
number=2,
type=4,
cpp_type=4,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None)],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=873,
serialized_end=942)
_DEBUGVIZ_LINESET_LINE.fields_by_name['begin'].message_type = Math_pb2._VECTOR3
_DEBUGVIZ_LINESET_LINE.fields_by_name['end'].message_type = Math_pb2._VECTOR3
_DEBUGVIZ_LINESET_LINE.containing_type = _DEBUGVIZ_LINESET
_DEBUGVIZ_LINESET.fields_by_name['lines'].message_type = _DEBUGVIZ_LINESET_LINE
_DEBUGVIZ_TEXT.fields_by_name['position_2d'].message_type = Math_pb2._VECTOR2
_DEBUGVIZ_TEXT.fields_by_name['position_3d'].message_type = Math_pb2._VECTOR3
_DEBUGVIZ_TEXT.fields_by_name['alignment'].enum_type = _DEBUGVIZ_TEXT_ALIGNMENT
_DEBUGVIZ_TEXT.fields_by_name['vertalign'].enum_type = _DEBUGVIZ_TEXT_VERTALIGN
_DEBUGVIZ_TEXT_ALIGNMENT.containing_type = _DEBUGVIZ_TEXT
_DEBUGVIZ_TEXT_VERTALIGN.containing_type = _DEBUGVIZ_TEXT
_DEBUGVIZ_LAYER.fields_by_name['text_set'].message_type = _DEBUGVIZ_TEXT
DESCRIPTOR.message_types_by_name['DebugViz_LineSet'] = _DEBUGVIZ_LINESET
DESCRIPTOR.message_types_by_name['DebugViz_Text'] = _DEBUGVIZ_TEXT
DESCRIPTOR.message_types_by_name['DebugViz_Layer'] = _DEBUGVIZ_LAYER
DESCRIPTOR.message_types_by_name['DebugViz_LayerUpdateNotification'] = _DEBUGVIZ_LAYERUPDATENOTIFICATION
class DebugViz_LineSet(message.Message, metaclass=reflection.GeneratedProtocolMessageType):
    # Machine-generated protobuf message wrapper; regenerate from
    # DebugVisualization.proto instead of editing by hand.

    class Line(message.Message, metaclass=reflection.GeneratedProtocolMessageType):
        # Nested Line message: begin/end Vector3 endpoints plus a color value.
        DESCRIPTOR = _DEBUGVIZ_LINESET_LINE

    DESCRIPTOR = _DEBUGVIZ_LINESET
class DebugViz_Text(message.Message, metaclass=reflection.GeneratedProtocolMessageType):
    # Machine-generated protobuf message (2D/3D position, fg/bg colors,
    # alignment enums); regenerate from the .proto rather than editing.
    DESCRIPTOR = _DEBUGVIZ_TEXT
class DebugViz_Layer(message.Message, metaclass=reflection.GeneratedProtocolMessageType):
DESCRIPTOR = _DEBUGVIZ_LAYER
class DebugViz_LayerUpdateNotification(message.Message, metaclass=reflection.GeneratedProtocolMessageType):
DESCRIPTOR = _DEBUGVIZ_LAYERUPDATENOTIFICATION | 29.886994 | 1,505 | 0.750874 | 1,914 | 14,017 | 5.247649 | 0.11651 | 0.058144 | 0.050179 | 0.067901 | 0.739546 | 0.692851 | 0.663978 | 0.584926 | 0.496615 | 0.49084 | 0 | 0.050912 | 0.124206 | 14,017 | 469 | 1,506 | 29.886994 | 0.767269 | 0.024542 | 0 | 0.742291 | 0 | 0.006608 | 0.176191 | 0.142753 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.011013 | 0 | 0.030837 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
87c49121c70cc390dc96fd03779f2c201950af9a | 97 | py | Python | aiida_firecrest/__init__.py | aiidateam/aiida-firecrest | 64c1584fdbb42c8561387932c7e23ab4bb657182 | [
"MIT"
] | null | null | null | aiida_firecrest/__init__.py | aiidateam/aiida-firecrest | 64c1584fdbb42c8561387932c7e23ab4bb657182 | [
"MIT"
] | null | null | null | aiida_firecrest/__init__.py | aiidateam/aiida-firecrest | 64c1584fdbb42c8561387932c7e23ab4bb657182 | [
"MIT"
] | null | null | null | """AiiDA Transport/Scheduler plugins for interfacing with FirecREST."""
__version__ = "0.1.0a1"
| 24.25 | 71 | 0.752577 | 12 | 97 | 5.75 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.046512 | 0.113402 | 97 | 3 | 72 | 32.333333 | 0.755814 | 0.670103 | 0 | 0 | 0 | 0 | 0.269231 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
87d49613cb46cd6438683e51d2bd6ca405a1aa67 | 16,475 | py | Python | ob_inner_table_schema_constants.py | raywill/automation.py | 5b33affd313a4aade7688daae3b086dcc244127f | [
"Apache-2.0"
] | 1 | 2021-01-15T04:28:23.000Z | 2021-01-15T04:28:23.000Z | ob_inner_table_schema_constants.py | raywill/automation.py | 5b33affd313a4aade7688daae3b086dcc244127f | [
"Apache-2.0"
] | null | null | null | ob_inner_table_schema_constants.py | raywill/automation.py | 5b33affd313a4aade7688daae3b086dcc244127f | [
"Apache-2.0"
] | 1 | 2021-01-15T04:28:27.000Z | 2021-01-15T04:28:27.000Z | table_id_map = {
1: "__all_core_table",
2: "__all_root_table",
3: "__all_table",
4: "__all_column",
5: "__all_ddl_operation",
101: "__all_meta_table",
102: "__all_user",
103: "__all_user_history",
104: "__all_database",
105: "__all_database_history",
106: "__all_tablegroup",
107: "__all_tablegroup_history",
108: "__all_tenant",
109: "__all_tenant_history",
110: "__all_table_privilege",
111: "__all_table_privilege_history",
112: "__all_database_privilege",
113: "__all_database_privilege_history",
114: "__all_table_history",
115: "__all_column_history",
116: "__all_zone",
117: "__all_server",
118: "__all_sys_parameter",
120: "__all_sys_variable",
121: "__all_sys_stat",
122: "__all_column_statistic",
123: "__all_unit",
124: "__all_unit_config",
125: "__all_resource_pool",
126: "__all_tenant_resource_usage",
127: "__all_sequence",
128: "__all_charset",
129: "__all_collation",
130: "help_topic",
131: "help_category",
132: "help_keyword",
133: "help_relation",
134: "__all_local_index_status",
135: "__all_dummy",
136: "__all_frozen_map",
137: "__all_clog_history_info",
139: "__all_clog_history_info_v2",
140: "__all_rootservice_event_history",
141: "__all_privilege",
142: "__all_outline",
143: "__all_outline_history",
144: "__all_election_event_history",
145: "__all_recyclebin",
146: "__all_part",
147: "__all_part_history",
148: "__all_sub_part",
149: "__all_sub_part_history",
150: "__all_part_info",
151: "__all_part_info_history",
152: "__all_def_sub_part",
153: "__all_def_sub_part_history",
154: "__all_server_event_history",
155: "__all_rootservice_job",
156: "__all_unit_load_history",
157: "__all_sys_variable_history",
158: "__all_restore_job",
159: "__all_restore_task",
160: "__all_restore_job_history",
161: "__all_time_zone",
162: "__all_time_zone_name",
163: "__all_time_zone_transition",
164: "__all_time_zone_transition_type",
165: "__all_ddl_id",
166: "__all_foreign_key",
167: "__all_foreign_key_history",
168: "__all_foreign_key_column",
169: "__all_foreign_key_column_history",
180: "__all_synonym",
181: "__all_synonym_history",
182: "__all_sequence_v2",
183: "__all_tenant_meta_table",
184: "__all_plan_baseline",
185: "__all_plan_baseline_history",
186: "__all_index_wait_transaction_status",
187: "__all_index_schedule_task",
188: "__all_index_checksum",
189: "__all_routine",
190: "__all_routine_history",
191: "__all_routine_param",
192: "__all_routine_param_history",
193: "__all_table_stat",
194: "__all_column_stat",
195: "__all_histogram_stat",
196: "__all_package",
197: "__all_package_history",
198: "__all_sql_execute_task",
199: "__all_index_build_stat",
200: "__all_build_index_param",
201: "__all_global_index_data_src",
202: "__all_acquired_snapshot",
203: "__all_immediate_effect_index_sstable",
204: "__all_sstable_checksum",
205: "__all_tenant_gc_partition_info",
206: "__all_constraint",
207: "__all_constraint_history",
208: "__all_ori_schema_version",
209: "__all_func",
210: "__all_func_history",
211: "__all_temp_table",
212: "__all_sstable_column_checksum",
213: "__all_sequence_object",
214: "__all_sequence_object_history",
215: "__all_sequence_value",
216: "__all_tenant_plan_baseline",
217: "__all_tenant_plan_baseline_history",
10001: "__tenant_virtual_all_table",
10002: "__tenant_virtual_table_column",
10003: "__tenant_virtual_table_index",
10004: "__tenant_virtual_show_create_database",
10005: "__tenant_virtual_show_create_table",
10006: "__tenant_virtual_session_variable",
10007: "__tenant_virtual_privilege_grant",
10008: "__all_virtual_processlist",
10009: "__tenant_virtual_warning",
10010: "__tenant_virtual_current_tenant",
10011: "__tenant_virtual_database_status",
10012: "__tenant_virtual_tenant_status",
10013: "__tenant_virtual_interm_result",
10014: "__tenant_virtual_partition_stat",
10015: "__tenant_virtual_statname",
10016: "__tenant_virtual_event_name",
10017: "__tenant_virtual_global_variable",
10018: "__tenant_virtual_show_tables",
10019: "__tenant_virtual_show_create_procedure",
11001: "__all_virtual_core_meta_table",
11002: "__all_virtual_zone_stat",
11003: "__all_virtual_plan_cache_stat",
11004: "__all_virtual_plan_stat",
11006: "__all_virtual_mem_leak_checker_info",
11007: "__all_virtual_latch",
11008: "__all_virtual_kvcache_info",
11009: "__all_virtual_data_type_class",
11010: "__all_virtual_data_type",
11011: "__all_virtual_server_stat",
11012: "__all_virtual_rebalance_task_stat",
11013: "__all_virtual_session_event",
11014: "__all_virtual_session_wait",
11015: "__all_virtual_session_wait_history",
11017: "__all_virtual_system_event",
11018: "__all_virtual_tenant_memstore_info",
11019: "__all_virtual_concurrency_object_pool",
11020: "__all_virtual_sesstat",
11021: "__all_virtual_sysstat",
11022: "__all_virtual_storage_stat",
11023: "__all_virtual_disk_stat",
11024: "__all_virtual_memstore_info",
11025: "__all_virtual_partition_info",
11026: "__all_virtual_upgrade_inspection",
11027: "__all_virtual_trans_stat",
11028: "__all_virtual_trans_mgr_stat",
11029: "__all_virtual_election_info",
11030: "__all_virtual_election_mem_stat",
11031: "__all_virtual_sql_audit",
11032: "__all_virtual_trans_mem_stat",
11033: "__all_virtual_partition_sstable_image_info",
11034: "__all_virtual_core_root_table",
11035: "__all_virtual_core_all_table",
11036: "__all_virtual_core_column_table",
11037: "__all_virtual_memory_info",
11038: "__all_virtual_tenant_stat",
11039: "__all_virtual_sys_parameter_stat",
11040: "__all_virtual_partition_replay_status",
11041: "__all_virtual_clog_stat",
11042: "__all_virtual_trace_log",
11043: "__all_virtual_engine",
11045: "__all_virtual_proxy_server_stat",
11046: "__all_virtual_proxy_sys_variable",
11047: "__all_virtual_proxy_schema",
11048: "__all_virtual_plan_cache_plan_explain",
11049: "__all_virtual_obrpc_stat",
11050: "__all_virtual_sql_plan_monitor",
11051: "__all_virtual_partition_sstable_merge_info",
11052: "__all_virtual_sql_monitor",
11053: "__tenant_virtual_outline",
11054: "__tenant_virtual_concurrent_limit_sql",
11055: "__all_virtual_sql_plan_statistics",
11056: "__all_virtual_partition_sstable_macro_info",
11057: "__all_virtual_proxy_partition_info",
11058: "__all_virtual_proxy_partition",
11059: "__all_virtual_proxy_sub_partition",
11060: "__all_virtual_proxy_route",
11061: "__all_virtual_rebalance_tenant_stat",
11062: "__all_virtual_rebalance_unit_stat",
11063: "__all_virtual_rebalance_replica_stat",
11064: "__all_virtual_partition_amplification_stat",
11067: "__all_virtual_election_event_history",
11068: "__all_virtual_partition_store_info",
11069: "__all_virtual_leader_stat",
11070: "__all_virtual_partition_migration_status",
11071: "__all_virtual_sys_task_status",
11072: "__all_virtual_macro_block_marker_status",
11073: "__all_virtual_server_clog_stat",
11074: "__all_virtual_rootservice_stat",
11075: "__all_virtual_election_priority",
11076: "__all_virtual_tenant_disk_stat",
11078: "__all_virtual_rebalance_map_stat",
11079: "__all_virtual_rebalance_map_item_stat",
11080: "__all_virtual_io_stat",
11081: "__all_virtual_long_ops_status",
11082: "__all_virtual_rebalance_unit_migrate_stat",
11083: "__all_virtual_rebalance_unit_distribution_stat",
11084: "__all_virtual_server_object_pool",
11085: "__all_virtual_trans_lock_stat",
11086: "__all_virtual_election_group_info",
11087: "__tenant_virtual_show_create_tablegroup",
11088: "__all_virtual_server_blacklist",
11089: "__all_virtual_partition_split_info",
11090: "__all_virtual_trans_result_info_stat",
11093: "__all_virtual_server_schema_info",
12000: "COLUMNS",
12001: "SESSION_VARIABLES",
12002: "TABLE_PRIVILEGES",
12003: "USER_PRIVILEGES",
12004: "SCHEMA_PRIVILEGES",
12005: "TABLE_CONSTRAINTS",
12006: "GLOBAL_STATUS",
12007: "PARTITIONS",
12008: "SESSION_STATUS",
12009: "user",
12010: "db",
12011: "__all_virtual_server_memory_info",
12012: "__all_virtual_partition_table",
12013: "__all_virtual_lock_wait_stat",
12014: "__all_virtual_partition_item",
12015: "__all_virtual_replica_task",
12016: "__all_virtual_partition_location",
12030: "proc",
12031: "__tenant_virtual_collation",
12032: "__tenant_virtual_charset",
12033: "__all_virtual_tenant_memstore_allocator_info",
12034: "__all_virtual_table_mgr",
12035: "__all_virtual_meta_table",
12036: "__all_virtual_freeze_info",
12037: "PARAMETERS",
12038: "__all_virtual_bad_block_table",
12039: "__all_virtual_px_worker_stat",
12040: "__all_virtual_trans_audit",
12041: "__all_virtual_trans_sql_audit",
12054: "__all_virtual_partition_audit",
12055: "__all_virtual_sequence_v2",
12056: "__all_virtual_sequence_value",
15001: "ALL_VIRTUAL_TABLE_AGENT",
15002: "ALL_VIRTUAL_COLUMN_AGENT",
15003: "ALL_VIRTUAL_DATABASE_AGENT",
15004: "ALL_VIRTUAL_SEQUENCE_V2_AGENT",
15005: "ALL_VIRTUAL_PART_AGENT",
15006: "ALL_VIRTUAL_SUB_PART_AGENT",
15007: "ALL_VIRTUAL_PACKAGE_AGENT",
15008: "ALL_VIRTUAL_TENANT_META_TABLE_AGENT",
15009: "ALL_VIRTUAL_SQL_AUDIT_ORA",
15010: "ALL_VIRTUAL_PLAN_STAT_ORA",
15011: "ALL_VIRTUAL_SQL_PLAN_STATISTICS_AGENT",
15012: "ALL_VIRTUAL_PLAN_CACHE_PLAN_EXPLAIN_ORA",
15013: "ALL_VIRTUAL_SEQUENCE_VALUE_AGENT",
15014: "ALL_VIRTUAL_SEQUENCE_OBJECT_AGENT",
15015: "ALL_VIRTUAL_USER_AGENT",
15016: "ALL_VIRTUAL_SYNONYM_AGENT",
15017: "ALL_VIRTUAL_FOREIGN_KEY_AGENT",
15018: "ALL_VIRTUAL_COLUMN_STAT_AGENT",
15019: "ALL_VIRTUAL_COLUMN_STATISTIC_AGENT",
15020: "ALL_VIRTUAL_PARTITION_TABLE_AGENT",
15021: "ALL_VIRTUAL_TABLE_STAT_AGENT",
15022: "ALL_VIRTUAL_RECYCLEBIN_AGENT",
15023: "TENANT_VIRTUAL_OUTLINE_AGENT",
15024: "ALL_VIRTUAL_ROUTINE_AGENT",
15025: "ALL_VIRTUAL_TABLEGROUP_AGENT",
15026: "ALL_VIRTUAL_PRIVILEGE_AGENT",
15027: "ALL_VIRTUAL_SYS_PARAMETER_STAT_AGENT",
15028: "TENANT_VIRTUAL_TABLE_INDEX_AGENT",
15029: "TENANT_VIRTUAL_CHARSET_AGENT",
15030: "TENANT_VIRTUAL_ALL_TABLE_AGENT",
15031: "TENANT_VIRTUAL_COLLATION_AGENT",
15032: "ALL_VIRTUAL_FOREIGN_KEY_COLUMN_AGENT",
15033: "ALL_VIRTUAL_SERVER_AGENT",
15034: "ALL_VIRTUAL_PLAN_CACHE_STAT_ORA",
15035: "ALL_VIRTUAL_PROCESSLIST_ORA",
15036: "ALL_VIRTUAL_SESSION_WAIT_ORA",
15037: "ALL_VIRTUAL_SESSION_WAIT_HISTORY_ORA",
15038: "ALL_VIRTUAL_MEMORY_INFO_ORA",
15039: "ALL_VIRTUAL_TENANT_MEMSTORE_INFO_ORA",
15040: "ALL_VIRTUAL_MEMSTORE_INFO_ORA",
15041: "ALL_VIRTUAL_SERVER_MEMORY_INFO_AGENT",
15042: "ALL_VIRTUAL_SESSTAT_ORA",
15043: "ALL_VIRTUAL_SYSSTAT_ORA",
15044: "ALL_VIRTUAL_SYSTEM_EVENT_ORA",
15045: "ALL_VIRTUAL_TENANT_MEMSTORE_ALLOCATOR_INFO_AGENT",
15046: "TENANT_VIRTUAL_SESSION_VARIABLE_ORA",
15047: "TENANT_VIRTUAL_GLOBAL_VARIABLE_ORA",
15048: "TENANT_VIRTUAL_SHOW_CREATE_TABLE_ORA",
15049: "TENANT_VIRTUAL_SHOW_CREATE_PROCEDURE_ORA",
15050: "TENANT_VIRTUAL_SHOW_CREATE_TABLEGROUP_ORA",
15051: "TENANT_VIRTUAL_PRIVILEGE_GRANT_ORA",
15052: "TENANT_VIRTUAL_TABLE_COLUMN_ORA",
15053: "ALL_VIRTUAL_TRACE_LOG_ORA",
15054: "TENANT_VIRTUAL_CONCURRENT_LIMIT_SQL_AGENT",
15055: "ALL_VIRTUAL_CONSTRAINT_AGENT",
20001: "gv$plan_cache_stat",
20002: "gv$plan_cache_plan_stat",
20003: "SCHEMATA",
20004: "CHARACTER_SETS",
20005: "GLOBAL_VARIABLES",
20006: "STATISTICS",
20007: "VIEWS",
20008: "TABLES",
20009: "COLLATIONS",
20010: "COLLATION_CHARACTER_SET_APPLICABILITY",
20011: "PROCESSLIST",
20012: "KEY_COLUMN_USAGE",
20013: "DBA_OUTLINES",
20014: "ENGINES",
20015: "ROUTINES",
21000: "gv$session_event",
21001: "gv$session_wait",
21002: "gv$session_wait_history",
21003: "gv$system_event",
21004: "gv$sesstat",
21005: "gv$sysstat",
21006: "v$statname",
21007: "v$event_name",
21008: "v$session_event",
21009: "v$session_wait",
21010: "v$session_wait_history",
21011: "v$sesstat",
21012: "v$sysstat",
21013: "v$system_event",
21014: "gv$sql_audit",
21015: "gv$latch",
21016: "gv$memory",
21017: "v$memory",
21018: "gv$memstore",
21019: "v$memstore",
21020: "gv$memstore_info",
21021: "v$memstore_info",
21022: "v$plan_cache_stat",
21023: "v$plan_cache_plan_stat",
21024: "gv$plan_cache_plan_explain",
21025: "v$plan_cache_plan_explain",
21026: "v$sql_audit",
21027: "v$latch",
21028: "gv$obrpc_outgoing",
21029: "v$obrpc_outgoing",
21030: "gv$obrpc_incoming",
21031: "v$obrpc_incoming",
21032: "gv$sql",
21033: "v$sql",
21034: "gv$sql_monitor",
21035: "v$sql_monitor",
21036: "gv$sql_plan_monitor",
21037: "v$sql_plan_monitor",
21038: "USER_RECYCLEBIN",
21039: "gv$outline",
21040: "gv$concurrent_limit_sql",
21041: "gv$sql_plan_statistics",
21042: "v$sql_plan_statistics",
21043: "gv$server_memstore",
21044: "gv$unit_load_balance_event_history",
21045: "gv$tenant",
21046: "gv$database",
21047: "gv$table",
21048: "gv$unit",
21049: "v$unit",
21050: "gv$partition",
21051: "v$partition",
21052: "gv$lock_wait_stat",
21053: "v$lock_wait_stat",
21054: "time_zone",
21055: "time_zone_name",
21056: "time_zone_transition",
21057: "time_zone_transition_type",
21059: "gv$session_longops",
21060: "v$session_longops",
21061: "DBA_PROCEDURES",
21062: "DBA_ARGUMENTS",
21063: "DBA_SOURCE",
21064: "gv$tenant_memstore_allocator_info",
21065: "v$tenant_memstore_allocator_info",
21066: "gv$tenant_sequence_object",
25001: "DBA_SYNONYMS",
25002: "DBA_OBJECTS",
25003: "ALL_OBJECTS",
25004: "USER_OBJECTS",
25005: "DBA_SEQUENCES",
25006: "ALL_SEQUENCES",
25007: "USER_SEQUENCES",
21073: "gv$partition_audit",
21074: "v$partition_audit",
25008: "DBA_USERS",
25009: "ALL_USERS",
25010: "ALL_SYNONYMS",
25011: "USER_SYNONYMS",
25012: "DBA_IND_COLUMNS",
25013: "ALL_IND_COLUMNS",
25014: "USER_IND_COLUMNS",
25015: "DBA_CONSTRAINTS",
25016: "ALL_CONSTRAINTS",
25017: "USER_CONSTRAINTS",
25018: "ALL_TAB_COLS_V$",
25019: "DBA_TAB_COLS_V$",
25020: "USER_TAB_COLS_V$",
25021: "ALL_TAB_COLS",
25022: "DBA_TAB_COLS",
25023: "USER_TAB_COLS",
25024: "ALL_TAB_COLUMNS",
25025: "DBA_TAB_COLUMNS",
25026: "USER_TAB_COLUMNS",
25027: "ALL_TABLES",
25028: "DBA_TABLES",
25029: "USER_TABLES",
25030: "DBA_TAB_COMMENTS",
25031: "ALL_TAB_COMMENTS",
25032: "USER_TAB_COMMENTS",
25033: "DBA_COL_COMMENTS",
25034: "ALL_COL_COMMENTS",
25035: "USER_COL_COMMENTS",
25036: "DBA_INDEXES",
25037: "ALL_INDEXES",
25038: "USER_INDEXES",
25039: "DBA_CONS_COLUMNS",
25040: "ALL_CONS_COLUMNS",
25041: "USER_CONS_COLUMNS",
25042: "USER_SEGMENTS",
25043: "DBA_SEGMENTS",
28001: "GV$OUTLINE_ORA",
28002: "GV$SQL_AUDIT_ORA",
28003: "V$SQL_AUDIT_ORA",
28004: "GV$INSTANCE",
28005: "V$INSTANCE",
28006: "GV$PLAN_CACHE_PLAN_STAT_ORA",
28007: "V$PLAN_CACHE_PLAN_STAT_ORA",
28008: "GV$PLAN_CACHE_PLAN_EXPLAIN_ORA",
28009: "V$PLAN_CACHE_PLAN_EXPLAIN_ORA",
28010: "GV$SESSION_WAIT_ORA",
28011: "V$SESSION_WAIT_ORA",
28012: "GV$SESSION_WAIT_HISTORY_ORA",
28013: "V$SESSION_WAIT_HISTORY_ORA",
28014: "GV$MEMORY_ORA",
28015: "V$MEMORY_ORA",
28016: "GV$MEMSTORE_ORA",
28017: "V$MEMSTORE_ORA",
28018: "GV$MEMSTORE_INFO_ORA",
28019: "V$MEMSTORE_INFO_ORA",
28020: "GV$SERVER_MEMSTORE_ORA",
28021: "GV$SESSTAT_ORA",
28022: "V$SESSTAT_ORA",
28023: "GV$SYSSTAT_ORA",
28024: "V$SYSSTAT_ORA",
28025: "GV$SYSTEM_EVENT_ORA",
28026: "V$SYSTEM_EVENT_ORA",
28027: "GV$TENANT_MEMSTORE_ALLOCATOR_INFO_ORA",
28028: "V$TENANT_MEMSTORE_ALLOCATOR_INFO_ORA",
28029: "GV$PLAN_CACHE_STAT_ORA",
28030: "V$PLAN_CACHE_STAT_ORA",
28031: "GV$CONCURRENT_LIMIT_SQL_ORA",
14999: "__all_virtual_plan_cache_stat",
14998: "__all_virtual_session_event",
14997: "__all_virtual_session_wait",
14996: "__all_virtual_session_wait_history",
14995: "__all_virtual_system_event",
14994: "__all_virtual_sesstat",
14993: "__all_virtual_sysstat",
14992: "__all_virtual_sql_audit",
19999: "ALL_VIRTUAL_SQL_AUDIT_ORA",
19998: "ALL_VIRTUAL_PLAN_CACHE_STAT_ORA",
19997: "ALL_VIRTUAL_SESSION_WAIT_ORA",
19996: "ALL_VIRTUAL_SESSION_WAIT_HISTORY_ORA",
19995: "ALL_VIRTUAL_SESSTAT_ORA",
19994: "ALL_VIRTUAL_SYSSTAT_ORA",
19993: "ALL_VIRTUAL_SYSTEM_EVENT_ORA"
}
| 34.611345 | 60 | 0.764127 | 2,209 | 16,475 | 4.955183 | 0.30919 | 0.143431 | 0.024301 | 0.015348 | 0.11374 | 0.022657 | 0 | 0 | 0 | 0 | 0 | 0.146718 | 0.115083 | 16,475 | 475 | 61 | 34.684211 | 0.604088 | 0 | 0 | 0 | 0 | 0 | 0.639636 | 0.468225 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
87da377718f68d8c1c818a68aa678c63ebb97769 | 139 | py | Python | plugins/bap/utils/__init__.py | gitoleg/bap-ida-python | f1cdd95578c331f1f3fba2150c2e2d134b8897f0 | [
"MIT"
] | 81 | 2016-06-10T19:07:12.000Z | 2022-03-23T08:15:41.000Z | plugins/bap/utils/__init__.py | gitoleg/bap-ida-python | f1cdd95578c331f1f3fba2150c2e2d134b8897f0 | [
"MIT"
] | 22 | 2016-06-16T19:35:59.000Z | 2020-12-10T14:53:38.000Z | plugins/bap/utils/__init__.py | gitoleg/bap-ida-python | f1cdd95578c331f1f3fba2150c2e2d134b8897f0 | [
"MIT"
] | 29 | 2016-06-10T18:26:04.000Z | 2022-02-14T06:15:30.000Z | """Commonly used utilities."""
# Names exported by `from ... import *` on this package.
__all__ = ('sexpr', 'bap_comment', 'run', 'ida', 'abstract_ida_plugins',
           'config', 'bap_taint')
| 27.8 | 72 | 0.611511 | 15 | 139 | 5.133333 | 0.866667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.172662 | 139 | 4 | 73 | 34.75 | 0.669565 | 0.172662 | 0 | 0 | 0 | 0 | 0.522936 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
87ee0b2f2606af23a94fb990007bf660f8cc497b | 1,402 | py | Python | pip_services_runtime/commands/ICommand.py | pip-services-archive/pip-services-runtime-python | 70eca1ffc44bfdc45c9c65b0ee347fa578368849 | [
"MIT"
] | null | null | null | pip_services_runtime/commands/ICommand.py | pip-services-archive/pip-services-runtime-python | 70eca1ffc44bfdc45c9c65b0ee347fa578368849 | [
"MIT"
] | null | null | null | pip_services_runtime/commands/ICommand.py | pip-services-archive/pip-services-runtime-python | 70eca1ffc44bfdc45c9c65b0ee347fa578368849 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
pip_services_runtime.commands.ICommand
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Interface for commands.
:copyright: Digital Living Software Corp. 2015-2016, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
class ICommand(object):
    """
    Interface for commands that execute functional operations.

    A command is a named unit of work: callers validate its arguments with
    :meth:`validate` and run it with :meth:`execute`.  Every method here
    raises NotImplementedError and must be overridden by implementations.
    """

    def get_name(self):
        """
        Gets the command name.

        Returns: the command name
        """
        raise NotImplementedError('Method from interface definition')

    def execute(self, correlation_id, args):
        """
        Executes the command given specific arguments as an input.

        Args:
            correlation_id: a unique correlation/transaction id, used to tie
                log entries and errors to one logical operation
            args: command arguments

        Returns: an execution result.

        Raises:
            MicroserviceError: when execution fails for whatever reason.
        """
        raise NotImplementedError('Method from interface definition')

    def validate(self, args):
        """
        Performs validation of the command arguments.

        Args:
            args: command arguments

        Returns: a list of MicroserviceError objects describing the problems
        found, or an empty list if validation was successful.
        """
        raise NotImplementedError('Method from interface definition')
| 28.612245 | 95 | 0.597004 | 134 | 1,402 | 6.208955 | 0.567164 | 0.048077 | 0.108173 | 0.122596 | 0.198317 | 0.198317 | 0.134615 | 0 | 0 | 0 | 0 | 0.009259 | 0.306705 | 1,402 | 49 | 96 | 28.612245 | 0.846708 | 0.561341 | 0 | 0.428571 | 0 | 0 | 0.255319 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0 | 0 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
87fc59fa0be9de8ea4ffb9ee55cc106e739b28f8 | 3,208 | py | Python | odelab/system/nonholonomic/rolling.py | olivierverdier/odelab | ee3300c663f595c2d185a00605bcfb93649352e0 | [
"BSD-3-Clause"
] | 15 | 2016-05-23T14:56:58.000Z | 2022-03-16T23:20:58.000Z | odelab/system/nonholonomic/rolling.py | olivierverdier/odelab | ee3300c663f595c2d185a00605bcfb93649352e0 | [
"BSD-3-Clause"
] | 3 | 2016-03-04T17:56:23.000Z | 2018-04-03T03:28:46.000Z | odelab/system/nonholonomic/rolling.py | olivierverdier/odelab | ee3300c663f595c2d185a00605bcfb93649352e0 | [
"BSD-3-Clause"
] | 4 | 2016-01-31T15:49:27.000Z | 2022-02-16T06:50:31.000Z | from __future__ import division
import numpy as np
from . import NonHolonomic
class Robot(NonHolonomic):
	"""
	Planar robot subject to a nonholonomic constraint, with a gravity-like
	force acting on the fourth coordinate.

	The state vector holds the configuration (components 0:4), the velocity
	(components 4:8) and two Lagrange multipliers (components 8:10).
	"""

	def position(self, u):
		"""Configuration part of the state (components 0:4)."""
		return u[:4]

	def velocity(self, u):
		"""Velocity part of the state (components 4:8)."""
		return u[4:8]

	def lag(self, u):
		"""Lagrange multipliers (components 8:10)."""
		return u[8:10]

	def codistribution(self, u):
		"""Constraint codistribution (a 2x4 matrix) evaluated at the state u."""
		heading = self.position(u)[2]
		return np.array([
			[1., 0., 0., -np.cos(heading)],
			[0., 1., 0., -np.sin(heading)],
		])

	def force(self, u):
		"""Force vector: only the fourth coordinate is forced, with -10*cos(q3)."""
		config = self.position(u)
		out = np.zeros_like(config)
		out[3] = -10*np.cos(config[3])
		return out

	def average_force(self, u0, u1):
		"""Averaged force between the states u0 and u1.

		Uses the finite difference of the potential 10*sin(q3) between the two
		angles, falling back to the pointwise force when the angles coincide.
		"""
		config0 = self.position(u0)
		angle0 = config0[3]
		angle1 = self.position(u1)[3]
		if np.allclose(angle0, angle1):
			avg = -10*np.cos(angle0)
		else:
			# discrete gradient of the potential 10*sin(theta)
			avg = -10*(np.sin(angle1) - np.sin(angle0))/(angle1 - angle0)
		out = np.zeros_like(config0)
		out[3] = avg
		return out

	def energy(self, u):
		"""Total energy: kinetic term plus the potential 10*sin(q3)."""
		angle = self.position(u)[3]
		kinetic = .5*np.sum(self.momentum(u) * self.velocity(u), axis=0)
		return kinetic + 10*np.sin(angle)
class VerticalRollingDisk(NonHolonomic):
	"""
	Vertical Rolling Disk

	State layout (10 components): configuration q = (x, y, phi, theta),
	then the four velocities, then two Lagrange multipliers enforcing
	the rolling constraint.
	"""
	size = 10 # 4+4+2
	def __init__(self, mass=1., radius=1., Iflip=1., Irot=1.):
		"""
		:mass: mass of the disk
		:radius: Radius of the disk
		:Iflip: inertia momentum around the "flip" axis
		:Irot: inertia momentum, around the axis of rotation symmetry of the disk (perpendicular to it)
		"""
		self.mass = mass
		self.radius = radius
		self.Iflip = Iflip
		self.Irot = Irot
	#def label(self, component):
		#return ['x','y',u'φ',u'θ','vx','vy',u'ωφ',u'ωη',u'λ1',u'λ2'][component]
	def position(self, u):
		"""
		Positions x,y,φ (SE(2) coordinates), θ (rotation)
		"""
		return u[:4]
	def velocity(self, u):
		# Velocity part of the state (components 4:8).
		return u[4:8]
	def average_force(self, u0, u1):
		# Averaged force between two states; trivially the pointwise force.
		return self.force(u0) # using the fact that the force is zero in this model
	def lag(self,u):
		# Lagrange multipliers (components 8:10).
		return u[8:10]
	def codistribution(self, u):
		# 2x4 constraint codistribution; ones_like/zeros_like keep this
		# working when phi is an array of angles rather than a scalar.
		q = self.position(u)
		phi = q[2]
		R = self.radius
		one = np.ones_like(phi)
		zero = np.zeros_like(phi)
		return np.array([[one, zero, zero, -R*np.cos(phi)],[zero, one, zero, -R*np.sin(phi)]])
	def state(self,u):
		# Positions and velocities, without the multipliers.
		return u[:8]
	def force(self,u):
		# No external force in this model: a zero vector of the same shape as q.
		return np.zeros_like(self.position(u))
	def qnorm(self, ut):
		# Euclidean norm of the (x, y) position.
		return np.sqrt(ut[0]**2 + ut[1]**2)
	def energy(self, ut):
		# Kinetic energy: translational part plus both rotational parts.
		return .5*(self.mass*(ut[4]**2 + ut[5]**2) + self.Iflip*ut[6]**2 + self.Irot*ut[7]**2)
	def exact(self,t,u0):
		"""
		Exact solution for initial condition u0 at times t

		:param array(N) t: time points of size N
		:param array(8+) u0: initial condition
		:return: a 10xN matrix of the exact solution
		"""
		# NOTE(review): divides by ohm_phi below — assumes ohm_phi != 0
		# (i.e. the disk is actually turning); confirm with callers.
		ohm_phi,ohm_theta = u0[6:8]
		R = self.radius
		rho = ohm_theta*R/ohm_phi
		x_0,y_0,phi_0,theta_0 = u0[:4]
		phi = ohm_phi*t+phi_0
		one = np.ones_like(t)
		m = self.mass
		# Rows: x, y, phi, theta, vx, vy, omega_phi, omega_theta, lambda1, lambda2.
		return np.vstack([rho*(np.sin(phi)-np.sin(phi_0)) + x_0,
				-rho*(np.cos(phi)-np.cos(phi_0)) + y_0,
				ohm_phi*t+phi_0,
				ohm_theta*t+theta_0,
				R*np.cos(phi)*ohm_theta,
				R*np.sin(phi)*ohm_theta,
				ohm_phi*one,
				ohm_theta*one,
				-m*ohm_phi*R*ohm_theta*np.sin(phi),
				m*ohm_phi*R*ohm_theta*np.cos(phi),])
	def initial(self, u00):
		"""
		Make sure that the constraints are fulfilled at the initial conditions.
		"""
		# Overwrite vx, vy so they satisfy the rolling constraint
		# (vx, vy) = omega_theta * (cos(phi), sin(phi)); input is not mutated.
		u0 = np.copy(u00)
		phi = u0[2]
		vtheta = u0[7]
		u0[4] = np.cos(phi)*vtheta
		u0[5] = np.sin(phi)*vtheta
		return u0
| 22.914286 | 97 | 0.627182 | 592 | 3,208 | 3.327703 | 0.226351 | 0.030457 | 0.039086 | 0.036548 | 0.155838 | 0.135533 | 0.096954 | 0.07868 | 0.07868 | 0.07868 | 0 | 0.049016 | 0.192332 | 3,208 | 139 | 98 | 23.079137 | 0.711308 | 0.223815 | 0 | 0.241758 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.208791 | false | 0 | 0.032967 | 0.10989 | 0.472527 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
87fee6e3f5a67a91ac5ef2f61b81aae9ab0dd925 | 536 | py | Python | misc.py | vlajnaya-mol/FreelanceTelegramBot | cc1dd14e25ccb754750edeef9bc1b77007bb2592 | [
"MIT"
] | null | null | null | misc.py | vlajnaya-mol/FreelanceTelegramBot | cc1dd14e25ccb754750edeef9bc1b77007bb2592 | [
"MIT"
] | null | null | null | misc.py | vlajnaya-mol/FreelanceTelegramBot | cc1dd14e25ccb754750edeef9bc1b77007bb2592 | [
"MIT"
] | null | null | null | import os
import sys
# Runtime configuration read from environment variables.
# Optional settings — `os.environ.get` yields None when the variable is unset:
BOT_USERNAME = os.environ.get("BOT_USERNAME")
TOKEN = os.environ.get("TOKEN")
NAME = os.environ.get("NAME")
PORT = os.environ.get("PORT")
GCP_PROJECT_NAME = os.environ.get("GCP_PROJECT_NAME")
GCP_TOPIC_NAME = os.environ.get("GCP_TOPIC_NAME")

# Required settings — `os.environ[...]` raises KeyError naming the missing
# variable, which fails fast at import time:
CLIENT_ID = os.environ["CLIENT_ID"]
CLIENT_SECRET = os.environ["CLIENT_SECRET"]
# Database encryption key, as bytes.  Read with [] so a missing DB_KEY fails
# with a clear KeyError instead of the opaque TypeError that
# str.encode(os.environ.get("DB_KEY")) raised when the variable was unset.
DB_KEY = os.environ["DB_KEY"].encode()
DATABASE_URL = os.environ["DATABASE_URL"]
SERVICE_NAME = os.environ["SERVICE_NAME"]
def log_line(message):
    """Emit *message* to standard output, terminated by a newline."""
    print(message, file=sys.stdout)
| 28.210526 | 53 | 0.746269 | 86 | 536 | 4.406977 | 0.348837 | 0.261214 | 0.221636 | 0.126649 | 0.100264 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.091418 | 536 | 18 | 54 | 29.777778 | 0.778234 | 0 | 0 | 0 | 0 | 0 | 0.203358 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.133333 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e20ac036f274599dcc2d0f58d0c7d9d22296ebd6 | 186 | py | Python | tests/Tx/test_default_api.py | jawahar273/Tx | d595ebd347194402984505a051f842854ce0fc9f | [
"MIT"
] | 50 | 2018-11-10T20:01:00.000Z | 2022-01-28T19:35:57.000Z | code/bootiful-sanic/tests/test_default_api.py | harshanarayana/pycon2019 | d0bc3485f89402c01c07ccb86684a62588d6dab2 | [
"MIT"
] | 12 | 2018-11-11T03:11:36.000Z | 2019-11-12T08:08:20.000Z | code/bootiful-sanic/tests/test_default_api.py | harshanarayana/pycon2019 | d0bc3485f89402c01c07ccb86684a62588d6dab2 | [
"MIT"
] | 10 | 2018-11-11T01:52:30.000Z | 2020-07-09T15:19:29.000Z | from sanic.testing import SanicTestClient
async def test_sanic_default_api(sanic_tester: SanicTestClient):
    """Smoke test: GET / on the Sanic test client must answer with HTTP 200."""
    response = await sanic_tester.get("/")
    assert response.status == 200
| 26.571429 | 64 | 0.77957 | 23 | 186 | 6.086957 | 0.73913 | 0.157143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01875 | 0.139785 | 186 | 6 | 65 | 31 | 0.85625 | 0 | 0 | 0 | 0 | 0 | 0.005376 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e20e6d7b4f56eecfdcb561b4ad0b697c3008c7da | 1,082 | py | Python | awsshell/substring.py | bdharang/AWS_SHELL | 4e84552f367f4da647e10be05795b870c112e3bb | [
"Apache-2.0"
] | null | null | null | awsshell/substring.py | bdharang/AWS_SHELL | 4e84552f367f4da647e10be05795b870c112e3bb | [
"Apache-2.0"
] | null | null | null | awsshell/substring.py | bdharang/AWS_SHELL | 4e84552f367f4da647e10be05795b870c112e3bb | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
def substring_search(word, collection):
    """Find every item of `collection` that starts with `word`.

    An empty `word` matches everything, so all items are returned.

    :type word: str
    :param word: The prefix to search for.
    :type collection: collection, usually a list
    :param collection: A collection of words to match.
    :rtype: list of strings
    :return: A sorted list of matching words from collection.
    """
    matches = (candidate for candidate in collection if candidate.startswith(word))
    return sorted(matches)
| 37.310345 | 73 | 0.725508 | 161 | 1,082 | 4.869565 | 0.546584 | 0.076531 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009185 | 0.195009 | 1,082 | 28 | 74 | 38.642857 | 0.89093 | 0.831793 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e21450a0b13e33a8b4d7bcaa27743fd31ed10a20 | 1,546 | py | Python | users/migrations/0004_auto_20200610_1359.py | nanthony007/RaiseLeaders | 3261f91cc5367975a33ee10f35ee919d997da60d | [
"MIT"
] | 1 | 2020-05-28T19:09:59.000Z | 2020-05-28T19:09:59.000Z | users/migrations/0004_auto_20200610_1359.py | nanthony007/RaiseLeaders | 3261f91cc5367975a33ee10f35ee919d997da60d | [
"MIT"
] | 10 | 2020-05-30T20:32:55.000Z | 2022-03-12T00:24:16.000Z | users/migrations/0004_auto_20200610_1359.py | nanthony007/RaiseLeaders | 3261f91cc5367975a33ee10f35ee919d997da60d | [
"MIT"
] | 2 | 2020-05-30T20:25:07.000Z | 2020-06-08T20:20:17.000Z | # Generated by Django 3.0.3 on 2020-06-10 13:59
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: replaces the Profile.image field with
    # a Profile.avatar CharField limited to a fixed set of bundled avatar
    # image filenames.

    dependencies = [
        ('users', '0003_auto_20200526_1631'),
    ]

    operations = [
        # Drop the old free-form image field...
        migrations.RemoveField(
            model_name='profile',
            name='image',
        ),
        # ...and add `avatar`: (filename, display label) choices with a default.
        migrations.AddField(
            model_name='profile',
            name='avatar',
            field=models.CharField(choices=[('blue_ninja.png', 'Blue Ninja 1'), ('blue_ninjax.png', 'Blue Ninja 2'), ('blue_sword_ninja.png', 'Blue Ninja w/ Swords'), ('brown_assassin.png', 'Brown Assassin'), ('cloud_ninja.png', 'Cloud Ninja'), ('dog_ninja.png', 'Dog Ninja'), ('fast_ninja.png', 'Fast Ninja'), ('fox_ninja.png', 'Fox Ninja'), ('green_ninja.png', 'Green Ninja'), ('green_samurai.png', 'Green Samurai'), ('hip_ninja.png', 'Hip Ninja'), ('kicking_ninja.png', 'Kicking Ninja'), ('knight_ninja.png', 'Knight Ninja'), ('lady_ninja.png', 'Lady Ninja'), ('ninjax_swords_gaming.png', 'Ninja Gaming w/ Swords'), ('orange_assassin.png', 'Orange Assassin'), ('orange_ninjax_gaming.png', 'Orange Ninja Gaming'), ('orange_ninjax_team.png', 'Orange Ninja Team'), ('prince_ninja.png', 'Prince Ninja'), ('purple_ninja.png', 'Purple Ninja'), ('red_ninjax_gaming.png', 'Red Ninja Gaming'), ('shinobi.png', 'Shinobi'), ('silent_ninja.png', 'Silent Ninja'), ('squad_ninja.png', 'Squad Ninja'), ('steathly_ninja.png', 'Steathly Ninja'), ('swift_ninja.png', 'Swift Ninja')], default='fast_ninja.png', max_length=50),
        ),
    ]
| 67.217391 | 1,110 | 0.649418 | 196 | 1,546 | 4.933673 | 0.362245 | 0.148914 | 0.037229 | 0.041365 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02684 | 0.156533 | 1,546 | 22 | 1,111 | 70.272727 | 0.714724 | 0.029107 | 0 | 0.25 | 1 | 0 | 0.55437 | 0.076051 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0625 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
e215357ad1f4072f6bb93c51d45510b7b1d15aa7 | 347 | py | Python | wafer/markdown.py | drnlm/wafer | 1d843190428c401df06fcdfb89d1f9d9af67229e | [
"ISC"
] | 41 | 2015-03-16T17:47:00.000Z | 2022-01-07T04:31:21.000Z | wafer/markdown.py | drnlm/wafer | 1d843190428c401df06fcdfb89d1f9d9af67229e | [
"ISC"
] | 338 | 2015-03-15T17:26:36.000Z | 2021-12-02T04:34:53.000Z | wafer/markdown.py | drnlm/wafer | 1d843190428c401df06fcdfb89d1f9d9af67229e | [
"ISC"
] | 28 | 2015-07-27T14:11:13.000Z | 2020-11-16T03:50:30.000Z | import bleach
from bleach_allowlist import markdown_tags, markdown_attrs
from markdown import markdown
def bleached_markdown(text, **kwargs):
    """Render *text* as Markdown, then sanitize the resulting HTML.

    The rendered output is passed through bleach with the standard
    markdown tag/attribute allowlists to mitigate XSS from untrusted
    input. Extra keyword arguments are forwarded to ``markdown()``.
    """
    rendered = markdown(text, **kwargs)
    return bleach.clean(rendered, markdown_tags, markdown_attrs)
| 31.545455 | 77 | 0.78098 | 43 | 347 | 6.116279 | 0.488372 | 0.106464 | 0.152091 | 0.190114 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146974 | 347 | 10 | 78 | 34.7 | 0.888514 | 0.129683 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.428571 | 0 | 0.714286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
356b230f9cbfe9952a24aea5193f8e0ef4bb5728 | 819 | py | Python | rest_multi_factor/factories/auth.py | KENTIVO/rest-multi-factor | 6342f473a287a546088e868e46be333323fd866c | [
"MIT"
] | null | null | null | rest_multi_factor/factories/auth.py | KENTIVO/rest-multi-factor | 6342f473a287a546088e868e46be333323fd866c | [
"MIT"
] | 7 | 2019-11-16T14:49:48.000Z | 2022-02-26T17:04:19.000Z | rest_multi_factor/factories/auth.py | KENTIVO/rest-multi-factor | 6342f473a287a546088e868e46be333323fd866c | [
"MIT"
] | 1 | 2020-02-05T15:01:53.000Z | 2020-02-05T15:01:53.000Z | """
Here is the Auth Factory defined, this factory will
generate a knox token.
"""
__all__ = ("AuthFactory",)
from factory import DjangoModelFactory, SubFactory
from rest_multi_factor.utils import get_token_model
from tests.utils import get_token_object, get_token_string
from rest_multi_factor.factories.user import UserFactory
class AuthFactory(DjangoModelFactory):
    """Factory producing knox token instances for tests.

    The created object is the token model instance with the plain-text
    token string attached as a ``token`` attribute.
    """

    class Meta:
        model = get_token_model()

    user = SubFactory(UserFactory)

    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        # Create the token through the model manager, then expose the
        # plain-text token string on the returned token object.
        created = cls._get_manager(model_class).create(*args, **kwargs)
        token_obj = get_token_object(created)
        token_obj.token = get_token_string(created)
        return token_obj
| 24.088235 | 61 | 0.71917 | 97 | 819 | 5.814433 | 0.474227 | 0.085106 | 0.046099 | 0.067376 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.19536 | 819 | 33 | 62 | 24.818182 | 0.855842 | 0.136752 | 0 | 0 | 1 | 0 | 0.023392 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.25 | 0 | 0.5625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
356ba47c4bac12651feb2e86577f9bceb5d96621 | 2,980 | py | Python | PartyTime/website/auth.py | KacchanEye/PartyTime | f8c30448edcc5ede9958ce5a5d79438a6d09ebf4 | [
"MIT"
] | null | null | null | PartyTime/website/auth.py | KacchanEye/PartyTime | f8c30448edcc5ede9958ce5a5d79438a6d09ebf4 | [
"MIT"
] | null | null | null | PartyTime/website/auth.py | KacchanEye/PartyTime | f8c30448edcc5ede9958ce5a5d79438a6d09ebf4 | [
"MIT"
] | 2 | 2022-01-03T16:38:53.000Z | 2022-02-13T16:24:36.000Z | from flask import Blueprint, render_template, request, flash, redirect, url_for
from .models import User
from werkzeug.security import generate_password_hash, check_password_hash
from . import db
from flask_login import login_user, login_required, logout_user, current_user
# Blueprint grouping the authentication routes defined below (login,
# logout, signup).
auth = Blueprint('auth', __name__)
@auth.route('/login', methods=['GET','POST'])
def login():
    """Render the login page and authenticate POSTed credentials.

    On a successful POST the user is logged in (remembered) and
    redirected to the index page; otherwise an error message is
    flashed and the login page is re-rendered.
    """
    if request.method == 'POST':
        user_id = request.form.get('id')
        password = request.form.get('pw')

        account = User.query.filter_by(id=user_id).first()
        if account is None:
            flash('Utente non esiste, riprova o registrati!', category='error')
        elif check_password_hash(account.pw, password):
            flash('Loggato!', category='success')
            login_user(account, remember=True)
            return redirect(url_for('views.index'))
        else:
            flash('Password errata, prova ancora!', category='error')

    return render_template('login.html', user=current_user)
@auth.route('/logout')
@login_required
def logout():
    """End the current session and redirect to the login page."""
    logout_user()
    return redirect(url_for('auth.login'))
@auth.route('/signup', methods=['GET','POST'])
def signup():
    """Render the signup page and register a new user from POSTed form data.

    The form fields are validated in order and an error message is
    flashed for the first failing check. On success the user is
    created, logged in, and redirected to the index page.
    """
    if request.method == 'POST':
        # Fetch form data.
        userDetails = request.form
        id = userDetails['id']
        pw1 = userDetails['pw1']
        pw2 = userDetails['pw2']
        nome = userDetails['nome']
        cogn = userDetails['cogn']
        cell = userDetails['cell']
        mail = userDetails['mail']
        # Input validation: only the first failing check is reported.
        # NOTE(review): several length checks are off by one relative to
        # their messages (e.g. len(id) <= 4 rejects 4-char ids while the
        # message says "almeno 4 caratteri") -- confirm intended minimums.
        user = User.query.filter_by(id=id).first()
        if user:
            flash("UserId gia' esistente", category='error')
        elif len(id) <= 4:
            flash('UserId non valido (almeno 4 caratteri)', category='error')
        elif len(pw1) <= 7:
            flash('Password non valida (almeno 7 caratteri)', category='error')
        elif pw1 != pw2:
            flash('Password non coincidono', category='error')
        elif len(nome) <= 3:
            flash('Nome non valido (almeno 3 caratteri)', category='error')
        elif len(cogn) <= 3:
            flash('Cognome non valido (almeno 3 caratteri)', category='error')
        elif len(cell) != 10:
            flash('Numero di cellulare non valido', category='error')
        elif len(mail) <= 4:
            flash('Email non valida (almeno 4 caratteri)', category='error')
        else:
            # All checks passed: create and persist the new user.
            new_user = User(id=id, pw=generate_password_hash(pw1, method='sha256'), nome=nome, cogn=cogn, cell=cell, mail=mail)
            db.session.add(new_user)
            db.session.commit()
            # Fix: category was misspelled 'succes', breaking any template
            # styling keyed on the 'success' category used by login().
            flash('Registrato!', category='success')
            login_user(new_user, remember=True)
            return redirect(url_for('views.index'))
return render_template('signup.html', user=current_user) | 37.721519 | 128 | 0.573154 | 337 | 2,980 | 4.964392 | 0.293769 | 0.077705 | 0.07113 | 0.071727 | 0.190675 | 0.151823 | 0.151823 | 0.151823 | 0.151823 | 0.043036 | 0 | 0.011015 | 0.299329 | 2,980 | 79 | 129 | 37.721519 | 0.79023 | 0.012752 | 0 | 0.174603 | 1 | 0 | 0.191824 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0.095238 | 0.079365 | 0 | 0.206349 | 0.031746 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
356dc9b28fde66b4c591f9744180c11e7b339424 | 2,913 | py | Python | third_party/webrtc/src/chromium/src/tools/perf/benchmarks/memory_benchmark.py | bopopescu/webrtc-streaming-node | 727a441204344ff596401b0253caac372b714d91 | [
"MIT"
] | 8 | 2016-02-08T11:59:31.000Z | 2020-05-31T15:19:54.000Z | third_party/webrtc/src/chromium/src/tools/perf/benchmarks/memory_benchmark.py | bopopescu/webrtc-streaming-node | 727a441204344ff596401b0253caac372b714d91 | [
"MIT"
] | 1 | 2021-05-05T11:11:31.000Z | 2021-05-05T11:11:31.000Z | third_party/webrtc/src/chromium/src/tools/perf/benchmarks/memory_benchmark.py | bopopescu/webrtc-streaming-node | 727a441204344ff596401b0253caac372b714d91 | [
"MIT"
] | 7 | 2016-02-09T09:28:14.000Z | 2020-07-25T19:03:36.000Z | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
from core import perf_benchmark
from telemetry import benchmark
from telemetry.timeline import tracing_category_filter
from telemetry.web_perf import timeline_based_measurement
import page_sets
class _MemoryBenchmark(perf_benchmark.PerfBenchmark):
  """Shared base class for the timeline-based memory benchmarks below."""

  def SetExtraBrowserOptions(self, options):
    # TODO(perezju): Temporary workaround to disable periodic memory dumps.
    # See: http://crbug.com/513692
    options.AppendExtraBrowserArgs('--enable-memory-benchmarking')

  def CreateTimelineBasedMeasurementOptions(self):
    # Trace only memory-infra (for the memory dumps) plus blink.console
    # (for the timeline markers used to map threads to tabs).
    category_filter = tracing_category_filter.TracingCategoryFilter(
        filter_string='-*,blink.console,disabled-by-default-memory-infra')
    return timeline_based_measurement.Options(overhead_level=category_filter)
# TODO(bashi): Workaround for http://crbug.com/532075
# @benchmark.Enabled('android') shouldn't be needed.
@benchmark.Enabled('android')
class MemoryHealthPlan(_MemoryBenchmark):
  """Timeline-based benchmark implementing the Memory Health Plan."""

  _RE_BENCHMARK_VALUES = re.compile('(fore|back)ground-memory_')

  page_set = page_sets.MemoryHealthStory

  @classmethod
  def Name(cls):
    return 'memory.memory_health_plan'

  @classmethod
  def ValueCanBeAddedPredicate(cls, value, is_first_result):
    # Only keep the foreground/background memory values.
    if not cls._RE_BENCHMARK_VALUES.match(value.name):
      return False
    # TODO(perezju): Do not ignore baidu failures http://crbug.com/538143
    return not ('baidu' in value.page.name and value.values is None)
# TODO(bashi): Workaround for http://crbug.com/532075
# @benchmark.Enabled('android') shouldn't be needed.
@benchmark.Enabled('android')
class RendererMemoryBlinkMemoryMobile(_MemoryBenchmark):
  """Timeline-based benchmark measuring memory use on mobile sites where
  blink's memory consumption is relatively high."""

  _RE_RENDERER_VALUES = re.compile('.+-memory_.+_renderer')

  page_set = page_sets.BlinkMemoryMobilePageSet

  def SetExtraBrowserOptions(self, options):
    super(RendererMemoryBlinkMemoryMobile, self).SetExtraBrowserOptions(
        options)
    extra_args = [
        # TODO(bashi): Temporary workaround for http://crbug.com/461788
        '--no-sandbox',
        # Ignore certs errors because record_wpr cannot handle certs correctly
        # in some cases (e.g. WordPress).
        '--ignore-certificate-errors',
    ]
    options.AppendExtraBrowserArgs(extra_args)

  @classmethod
  def Name(cls):
    return 'memory.blink_memory_mobile'

  @classmethod
  def ValueCanBeAddedPredicate(cls, value, is_first_result):
    # Only keep per-renderer memory values.
    return cls._RE_RENDERER_VALUES.match(value.name) is not None
| 35.52439 | 78 | 0.760041 | 352 | 2,913 | 6.15625 | 0.431818 | 0.029995 | 0.027688 | 0.030457 | 0.232118 | 0.183664 | 0.153207 | 0.153207 | 0.098754 | 0.098754 | 0 | 0.013677 | 0.146584 | 2,913 | 81 | 79 | 35.962963 | 0.858005 | 0.36114 | 0 | 0.292683 | 0 | 0 | 0.126845 | 0.109896 | 0 | 0 | 0 | 0.024691 | 0 | 1 | 0.170732 | false | 0 | 0.146341 | 0.097561 | 0.609756 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
357449bae962838ddb6b706e383c34ef5ec6dab6 | 620 | py | Python | bsff/actors/migrations/0003_auto_20210517_0328.py | ErikSeguinte/BSFF_Django | a4dde02b2af233bbcf0c625c20a5f2814a8ca214 | [
"MIT"
] | null | null | null | bsff/actors/migrations/0003_auto_20210517_0328.py | ErikSeguinte/BSFF_Django | a4dde02b2af233bbcf0c625c20a5f2814a8ca214 | [
"MIT"
] | null | null | null | bsff/actors/migrations/0003_auto_20210517_0328.py | ErikSeguinte/BSFF_Django | a4dde02b2af233bbcf0c625c20a5f2814a8ca214 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.11 on 2021-05-17 03:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the actors app.

    Makes Actor.bio and Actor.short_name optional at the form level
    (blank=True) and updates their verbose names.
    """

    dependencies = [
        ('actors', '0002_auto_20210517_0212'),
    ]

    operations = [
        migrations.AlterField(
            model_name='actor',
            name='bio',
            field=models.TextField(blank=True, verbose_name='Bio'),
        ),
        migrations.AlterField(
            model_name='actor',
            name='short_name',
            field=models.CharField(blank=True, max_length=255, verbose_name='First name, short name, or use-name'),
        ),
    ]
| 25.833333 | 115 | 0.595161 | 69 | 620 | 5.217391 | 0.637681 | 0.111111 | 0.138889 | 0.161111 | 0.211111 | 0.211111 | 0 | 0 | 0 | 0 | 0 | 0.078829 | 0.283871 | 620 | 23 | 116 | 26.956522 | 0.731982 | 0.074194 | 0 | 0.352941 | 1 | 0 | 0.157343 | 0.04021 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.058824 | 0 | 0.235294 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
3574e041f95f3bf72d06048f3af3dd71502784ac | 1,300 | py | Python | lib/handlers/help.py | Dylnuge/lunch-bot | 8770bac9e7a824d9ed75e7aa3abcc112be840509 | [
"MIT"
] | 1 | 2020-02-19T19:52:41.000Z | 2020-02-19T19:52:41.000Z | lib/handlers/help.py | Dylnuge/lunch-bot | 8770bac9e7a824d9ed75e7aa3abcc112be840509 | [
"MIT"
] | 11 | 2020-01-14T23:32:40.000Z | 2020-02-07T17:51:13.000Z | lib/handlers/help.py | Dylnuge/lunch-bot | 8770bac9e7a824d9ed75e7aa3abcc112be840509 | [
"MIT"
] | 2 | 2020-02-19T19:53:30.000Z | 2020-02-20T15:02:53.000Z | from lib import common
from lib.handlers import HandlerParams
# User-facing help text sent verbatim by handle_help() in response to the
# `help` command. Keep the command list in sync with the registered handlers.
# NOTE(review): the rsvp/un-rsvp/delete-plan entries take [restaurant] (time)
# arguments but their descriptions still refer to a [lunch_id] argument --
# the prose looks out of date with the command signatures; confirm and update.
HELP_MESSAGE = """
Lunch Bot Help
Available Commands:
`help` Displays all available commands that Lunch Bot understands
`make-plan [restaurant] [time]` Creates a lunch plan for a given place and time. [restaurant] and [time] must not \
contain any spaces.
`show-plans` Shows all active lunch plans along with their associated lunch_id
`rsvp [restaurant] (time)` RSVPs to a certain lunch plan, given its [lunch_id]. To see every lunch_id, use the \
show-plans command. If there are multiple lunch plans at the same restaurant, use the time to disambiguate them.
`my-plans` Shows all lunch plans you have currently RSVP'd to.
`un-rsvp [restaurant] (time)` Removes your RSVP from a certain lunch plan, given its [lunch_id]. To see every \
lunch_id, use the show-plans command. If there are multiple lunch plans at the same restaurant, use the time to \
disambiguate them.
`delete-plan [restaurant] (time)` Deletes a certain lunch plan, given its [lunch_id]. To see every lunch_id, use the \
show-plans command. If there are multiple lunch plans at the same restaurant, use the time to disambiguate them."""
def handle_help(params: HandlerParams):
    """Reply to a `help` command with the full command reference text."""
    common.send_reply(params.client, params.message, HELP_MESSAGE)
| 38.235294 | 118 | 0.759231 | 206 | 1,300 | 4.737864 | 0.359223 | 0.050205 | 0.039959 | 0.052254 | 0.44877 | 0.44877 | 0.44877 | 0.44877 | 0.44877 | 0.44877 | 0 | 0 | 0.166923 | 1,300 | 33 | 119 | 39.393939 | 0.9012 | 0 | 0 | 0 | 0 | 0.333333 | 0.838462 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0 | 0.095238 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
35779f8256fe59c7239a7049e60484217c085fa9 | 13,419 | py | Python | map_label_tool/py_proto/modules/common/latency_recorder/proto/latency_record_pb2.py | freeclouds/OpenHDMap | b61c159fbdf4f50ae1d1650421596b28863f39be | [
"Apache-2.0"
] | null | null | null | map_label_tool/py_proto/modules/common/latency_recorder/proto/latency_record_pb2.py | freeclouds/OpenHDMap | b61c159fbdf4f50ae1d1650421596b28863f39be | [
"Apache-2.0"
] | null | null | null | map_label_tool/py_proto/modules/common/latency_recorder/proto/latency_record_pb2.py | freeclouds/OpenHDMap | b61c159fbdf4f50ae1d1650421596b28863f39be | [
"Apache-2.0"
] | 1 | 2021-05-26T08:42:11.000Z | 2021-05-26T08:42:11.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: modules/common/latency_recorder/proto/latency_record.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from modules.common.proto import header_pb2 as modules_dot_common_dot_proto_dot_header__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='modules/common/latency_recorder/proto/latency_record.proto',
package='apollo.common',
syntax='proto2',
serialized_pb=_b('\n:modules/common/latency_recorder/proto/latency_record.proto\x12\rapollo.common\x1a!modules/common/proto/header.proto\"I\n\rLatencyRecord\x12\x12\n\nbegin_time\x18\x01 \x01(\x04\x12\x10\n\x08\x65nd_time\x18\x02 \x01(\x04\x12\x12\n\nmessage_id\x18\x03 \x01(\x04\"\x85\x01\n\x10LatencyRecordMap\x12%\n\x06header\x18\x01 \x01(\x0b\x32\x15.apollo.common.Header\x12\x13\n\x0bmodule_name\x18\x02 \x01(\t\x12\x35\n\x0flatency_records\x18\x03 \x03(\x0b\x32\x1c.apollo.common.LatencyRecord\"z\n\x0bLatencyStat\x12)\n\x0cmin_duration\x18\x01 \x01(\x04:\x13\x39\x32\x32\x33\x33\x37\x32\x30\x33\x36\x38\x35\x34\x37\x37\x35\x38\x30\x38\x12\x14\n\x0cmax_duration\x18\x02 \x01(\x04\x12\x15\n\raver_duration\x18\x03 \x01(\x04\x12\x13\n\x0bsample_size\x18\x04 \x01(\r\"\xb5\x01\n\x0cLatencyTrack\x12\x46\n\rlatency_track\x18\x01 \x03(\x0b\x32/.apollo.common.LatencyTrack.LatencyTrackMessage\x1a]\n\x13LatencyTrackMessage\x12\x14\n\x0clatency_name\x18\x01 \x01(\t\x12\x30\n\x0clatency_stat\x18\x02 \x01(\x0b\x32\x1a.apollo.common.LatencyStat\"\x9f\x01\n\rLatencyReport\x12%\n\x06header\x18\x01 \x01(\x0b\x32\x15.apollo.common.Header\x12\x31\n\x0c\x65\x32\x65s_latency\x18\x02 \x01(\x0b\x32\x1b.apollo.common.LatencyTrack\x12\x34\n\x0fmodules_latency\x18\x03 \x01(\x0b\x32\x1b.apollo.common.LatencyTrack')
,
dependencies=[modules_dot_common_dot_proto_dot_header__pb2.DESCRIPTOR,])
_LATENCYRECORD = _descriptor.Descriptor(
name='LatencyRecord',
full_name='apollo.common.LatencyRecord',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='begin_time', full_name='apollo.common.LatencyRecord.begin_time', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='end_time', full_name='apollo.common.LatencyRecord.end_time', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='message_id', full_name='apollo.common.LatencyRecord.message_id', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=112,
serialized_end=185,
)
_LATENCYRECORDMAP = _descriptor.Descriptor(
name='LatencyRecordMap',
full_name='apollo.common.LatencyRecordMap',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='header', full_name='apollo.common.LatencyRecordMap.header', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='module_name', full_name='apollo.common.LatencyRecordMap.module_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='latency_records', full_name='apollo.common.LatencyRecordMap.latency_records', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=188,
serialized_end=321,
)
_LATENCYSTAT = _descriptor.Descriptor(
name='LatencyStat',
full_name='apollo.common.LatencyStat',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='min_duration', full_name='apollo.common.LatencyStat.min_duration', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=True, default_value=9223372036854775808,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_duration', full_name='apollo.common.LatencyStat.max_duration', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='aver_duration', full_name='apollo.common.LatencyStat.aver_duration', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sample_size', full_name='apollo.common.LatencyStat.sample_size', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=323,
serialized_end=445,
)
_LATENCYTRACK_LATENCYTRACKMESSAGE = _descriptor.Descriptor(
name='LatencyTrackMessage',
full_name='apollo.common.LatencyTrack.LatencyTrackMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='latency_name', full_name='apollo.common.LatencyTrack.LatencyTrackMessage.latency_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='latency_stat', full_name='apollo.common.LatencyTrack.LatencyTrackMessage.latency_stat', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=536,
serialized_end=629,
)
_LATENCYTRACK = _descriptor.Descriptor(
name='LatencyTrack',
full_name='apollo.common.LatencyTrack',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='latency_track', full_name='apollo.common.LatencyTrack.latency_track', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_LATENCYTRACK_LATENCYTRACKMESSAGE, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=448,
serialized_end=629,
)
_LATENCYREPORT = _descriptor.Descriptor(
name='LatencyReport',
full_name='apollo.common.LatencyReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='header', full_name='apollo.common.LatencyReport.header', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='e2es_latency', full_name='apollo.common.LatencyReport.e2es_latency', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='modules_latency', full_name='apollo.common.LatencyReport.modules_latency', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=632,
serialized_end=791,
)
_LATENCYRECORDMAP.fields_by_name['header'].message_type = modules_dot_common_dot_proto_dot_header__pb2._HEADER
_LATENCYRECORDMAP.fields_by_name['latency_records'].message_type = _LATENCYRECORD
_LATENCYTRACK_LATENCYTRACKMESSAGE.fields_by_name['latency_stat'].message_type = _LATENCYSTAT
_LATENCYTRACK_LATENCYTRACKMESSAGE.containing_type = _LATENCYTRACK
_LATENCYTRACK.fields_by_name['latency_track'].message_type = _LATENCYTRACK_LATENCYTRACKMESSAGE
_LATENCYREPORT.fields_by_name['header'].message_type = modules_dot_common_dot_proto_dot_header__pb2._HEADER
_LATENCYREPORT.fields_by_name['e2es_latency'].message_type = _LATENCYTRACK
_LATENCYREPORT.fields_by_name['modules_latency'].message_type = _LATENCYTRACK
DESCRIPTOR.message_types_by_name['LatencyRecord'] = _LATENCYRECORD
DESCRIPTOR.message_types_by_name['LatencyRecordMap'] = _LATENCYRECORDMAP
DESCRIPTOR.message_types_by_name['LatencyStat'] = _LATENCYSTAT
DESCRIPTOR.message_types_by_name['LatencyTrack'] = _LATENCYTRACK
DESCRIPTOR.message_types_by_name['LatencyReport'] = _LATENCYREPORT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
LatencyRecord = _reflection.GeneratedProtocolMessageType('LatencyRecord', (_message.Message,), dict(
DESCRIPTOR = _LATENCYRECORD,
__module__ = 'modules.common.latency_recorder.proto.latency_record_pb2'
# @@protoc_insertion_point(class_scope:apollo.common.LatencyRecord)
))
_sym_db.RegisterMessage(LatencyRecord)
LatencyRecordMap = _reflection.GeneratedProtocolMessageType('LatencyRecordMap', (_message.Message,), dict(
DESCRIPTOR = _LATENCYRECORDMAP,
__module__ = 'modules.common.latency_recorder.proto.latency_record_pb2'
# @@protoc_insertion_point(class_scope:apollo.common.LatencyRecordMap)
))
_sym_db.RegisterMessage(LatencyRecordMap)
LatencyStat = _reflection.GeneratedProtocolMessageType('LatencyStat', (_message.Message,), dict(
DESCRIPTOR = _LATENCYSTAT,
__module__ = 'modules.common.latency_recorder.proto.latency_record_pb2'
# @@protoc_insertion_point(class_scope:apollo.common.LatencyStat)
))
_sym_db.RegisterMessage(LatencyStat)
LatencyTrack = _reflection.GeneratedProtocolMessageType('LatencyTrack', (_message.Message,), dict(
LatencyTrackMessage = _reflection.GeneratedProtocolMessageType('LatencyTrackMessage', (_message.Message,), dict(
DESCRIPTOR = _LATENCYTRACK_LATENCYTRACKMESSAGE,
__module__ = 'modules.common.latency_recorder.proto.latency_record_pb2'
# @@protoc_insertion_point(class_scope:apollo.common.LatencyTrack.LatencyTrackMessage)
))
,
DESCRIPTOR = _LATENCYTRACK,
__module__ = 'modules.common.latency_recorder.proto.latency_record_pb2'
# @@protoc_insertion_point(class_scope:apollo.common.LatencyTrack)
))
_sym_db.RegisterMessage(LatencyTrack)
_sym_db.RegisterMessage(LatencyTrack.LatencyTrackMessage)
LatencyReport = _reflection.GeneratedProtocolMessageType('LatencyReport', (_message.Message,), dict(
DESCRIPTOR = _LATENCYREPORT,
__module__ = 'modules.common.latency_recorder.proto.latency_record_pb2'
# @@protoc_insertion_point(class_scope:apollo.common.LatencyReport)
))
_sym_db.RegisterMessage(LatencyReport)
# @@protoc_insertion_point(module_scope)
| 39.008721 | 1,307 | 0.759371 | 1,667 | 13,419 | 5.811638 | 0.113977 | 0.044591 | 0.031792 | 0.045417 | 0.650186 | 0.581751 | 0.540256 | 0.520851 | 0.505058 | 0.495562 | 0 | 0.039008 | 0.119308 | 13,419 | 343 | 1,308 | 39.122449 | 0.780758 | 0.045532 | 0 | 0.652459 | 1 | 0.003279 | 0.241072 | 0.195983 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.022951 | 0 | 0.022951 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
357c66c1e19296db388b12cf4277bdfc1d83228a | 5,547 | py | Python | pysnmp-with-texts/TIPPINGPOINT-REG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/TIPPINGPOINT-REG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/TIPPINGPOINT-REG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module TIPPINGPOINT-REG-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TIPPINGPOINT-REG-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:23:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
NotificationType, Gauge32, enterprises, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, Counter32, ObjectIdentity, MibIdentifier, ModuleIdentity, Counter64, Bits, iso, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "Gauge32", "enterprises", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Unsigned32", "Counter32", "ObjectIdentity", "MibIdentifier", "ModuleIdentity", "Counter64", "Bits", "iso", "IpAddress")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
tippingpoint = ModuleIdentity((1, 3, 6, 1, 4, 1, 10734))
tippingpoint.setRevisions(('2016-05-25 18:54',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: tippingpoint.setRevisionsDescriptions(('Updated copyright information. Minor MIB syntax fixes.',))
if mibBuilder.loadTexts: tippingpoint.setLastUpdated('201605251854Z')
if mibBuilder.loadTexts: tippingpoint.setOrganization('Trend Micro, Inc.')
if mibBuilder.loadTexts: tippingpoint.setContactInfo('www.trendmicro.com')
if mibBuilder.loadTexts: tippingpoint.setDescription("Definitions of registration identities for all TPT modules. Copyright (C) 2016 Trend Micro Incorporated. All Rights Reserved. Trend Micro makes no warranty of any kind with regard to this material, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose. Trend Micro shall not be liable for errors contained herein or for incidental or consequential damages in connection with the furnishing, performance, or use of this material. This document contains proprietary information, which is protected by copyright. No part of this document may be photocopied, reproduced, or translated into another language without the prior written consent of Trend Micro. The information is provided 'as is' without warranty of any kind and is subject to change without notice. The only warranties for Trend Micro products and services are set forth in the express warranty statements accompanying such products and services. Nothing herein should be construed as constituting an additional warranty. Trend Micro shall not be liable for technical or editorial errors or omissions contained herein. TippingPoint(R), the TippingPoint logo, and Digital Vaccine(R) are registered trademarks of Trend Micro. All other company and product names may be trademarks of their respective holders. All rights reserved. This document contains confidential information, trade secrets or both, which are the property of Trend Micro. No part of this documentation may be reproduced in any form or by any means or used to make any derivative work (such as translation, transformation, or adaptation) without written permission from Trend Micro or one of its subsidiaries. All other company and product names may be trademarks of their respective holders. ")
tpt_reg = ObjectIdentity((1, 3, 6, 1, 4, 1, 10734, 1)).setLabel("tpt-reg")
if mibBuilder.loadTexts: tpt_reg.setStatus('current')
if mibBuilder.loadTexts: tpt_reg.setDescription('Sub-tree for the registered modules for TippingPoint Technologies.')
tpt_generic = ObjectIdentity((1, 3, 6, 1, 4, 1, 10734, 2)).setLabel("tpt-generic")
if mibBuilder.loadTexts: tpt_generic.setStatus('current')
if mibBuilder.loadTexts: tpt_generic.setDescription('Sub-tree for common object and event definitions.')
tpt_products = ObjectIdentity((1, 3, 6, 1, 4, 1, 10734, 3)).setLabel("tpt-products")
if mibBuilder.loadTexts: tpt_products.setStatus('current')
if mibBuilder.loadTexts: tpt_products.setDescription('Sub-tree for specific object and event definitions.')
tpt_caps = ObjectIdentity((1, 3, 6, 1, 4, 1, 10734, 4)).setLabel("tpt-caps")
if mibBuilder.loadTexts: tpt_caps.setStatus('current')
if mibBuilder.loadTexts: tpt_caps.setDescription('Sub-tree for agent profiles.')
tpt_reqs = ObjectIdentity((1, 3, 6, 1, 4, 1, 10734, 5)).setLabel("tpt-reqs")
if mibBuilder.loadTexts: tpt_reqs.setStatus('current')
if mibBuilder.loadTexts: tpt_reqs.setDescription('Sub-tree for management application requirements.')
tpt_expr = ObjectIdentity((1, 3, 6, 1, 4, 1, 10734, 6)).setLabel("tpt-expr")
if mibBuilder.loadTexts: tpt_expr.setStatus('current')
if mibBuilder.loadTexts: tpt_expr.setDescription('Sub-tree for experimental definitions.')
mibBuilder.exportSymbols("TIPPINGPOINT-REG-MIB", tippingpoint=tippingpoint, tpt_expr=tpt_expr, tpt_products=tpt_products, tpt_generic=tpt_generic, PYSNMP_MODULE_ID=tippingpoint, tpt_reg=tpt_reg, tpt_reqs=tpt_reqs, tpt_caps=tpt_caps)
| 132.071429 | 1,831 | 0.797007 | 722 | 5,547 | 6.078947 | 0.360111 | 0.04648 | 0.08134 | 0.065619 | 0.321941 | 0.217589 | 0.162907 | 0.147186 | 0.11301 | 0.11301 | 0 | 0.037164 | 0.102578 | 5,547 | 41 | 1,832 | 135.292683 | 0.844516 | 0.060213 | 0 | 0 | 0 | 0.030303 | 0.528055 | 0.008455 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.181818 | 0 | 0.181818 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
357f9519d70a1e9f5a9595ba504cba1c4aa05e85 | 2,182 | py | Python | pyobsim/order.py | hetra/PyOBSim | 036ddf1418674114e5e7b2a9bd6974352f2f0293 | [
"MIT"
] | 23 | 2017-10-27T17:34:01.000Z | 2022-03-21T16:07:31.000Z | pyobsim/order.py | hetra/PyOBSim | 036ddf1418674114e5e7b2a9bd6974352f2f0293 | [
"MIT"
] | 1 | 2018-12-02T05:07:22.000Z | 2020-02-12T01:34:11.000Z | pyobsim/order.py | hetra/PyOBSim | 036ddf1418674114e5e7b2a9bd6974352f2f0293 | [
"MIT"
] | 10 | 2018-01-24T22:57:07.000Z | 2021-08-13T00:15:15.000Z | CENTS_PER_DOLLAR = 100
class Order(object):
    """An order in an order book.

    Validates on construction and on mutation: id must be non-negative,
    price must be positive after rounding to cents, qty must be a
    positive integer.
    """

    def __init__(self, id, owner, ticker, type, price, qty):
        if int(id) < 0:
            raise ValueError()

        if round(float(price), 2) <= 0:
            raise ValueError()

        if int(qty) <= 0:
            raise ValueError()

        self.__id = id
        self.__owner = owner
        self.__ticker = str(ticker)
        self.__type = type
        self.__price = round(float(price), 2)
        self.__qty = int(qty)

    def __eq__(self, o):
        # Two orders are equal only when every field matches.
        if isinstance(o, Order):
            return self.id == o.id and self.owner == o.owner and \
                   self.ticker == o.ticker and self.type == o.type and \
                   self.price == o.price and self.qty == o.qty
        else:
            return False

    @property
    def id(self):
        return self.__id

    @property
    def owner(self):
        return self.__owner

    @owner.setter
    def owner(self, owner):
        self.__owner = owner

    @property
    def ticker(self):
        return self.__ticker

    @ticker.setter
    def ticker(self, ticker):
        self.__ticker = str(ticker)

    @property
    def type(self):
        return self.__type

    @property
    def price(self):
        return round(self.__price, 2)

    @price.setter
    def price(self, price):
        # BUG FIX: the setter previously used round(float(price)) with no
        # ndigits, rounding to whole dollars. That silently discarded cents
        # and wrongly raised ValueError for valid sub-dollar prices such as
        # 0.25 (round(0.25) == 0). Round to cents, as __init__ does.
        if round(float(price), 2) <= 0:
            raise ValueError()

        self.__price = round(float(price), 2)

    @property
    def qty(self):
        return self.__qty

    @qty.setter
    def qty(self, qty):
        if int(qty) <= 0:
            raise ValueError()

        self.__qty = int(qty)

    def __str__(self):
        return "{0}: {1} for {2} @ ${3} by {4}".format(self.ticker, self.type,
                                                       self.qty, self.price,
                                                       self.owner)

    def __repr__(self):
        s = "ID: " + str(self.id) + "\n"
        s += "Owner: " + str(self.owner) + "\n"
        s += "Ticker: " + self.ticker + "\n"
        s += "Type: " + str(self.type) + "\n"
        s += "Price: $" + str(self.price) + "\n"
        s += "Quantity: " + str(self.qty) + "\n"

        return s
| 23.978022 | 78 | 0.493126 | 259 | 2,182 | 3.96139 | 0.169884 | 0.061404 | 0.077973 | 0.05848 | 0.128655 | 0.054581 | 0.054581 | 0 | 0 | 0 | 0 | 0.011713 | 0.373969 | 2,182 | 90 | 79 | 24.244444 | 0.739385 | 0 | 0 | 0.279412 | 0 | 0 | 0.038973 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.205882 | false | 0 | 0 | 0.102941 | 0.367647 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
358d9cb63c69fbc2eae3210b79b07de9a1432219 | 2,614 | py | Python | mmdnn/conversion/caffe/shape.py | isikdogan/MMdnn | ed2511abe27fc836eda973d262ff748a2b7a2679 | [
"MIT"
] | null | null | null | mmdnn/conversion/caffe/shape.py | isikdogan/MMdnn | ed2511abe27fc836eda973d262ff748a2b7a2679 | [
"MIT"
] | null | null | null | mmdnn/conversion/caffe/shape.py | isikdogan/MMdnn | ed2511abe27fc836eda973d262ff748a2b7a2679 | [
"MIT"
] | 1 | 2020-01-13T16:02:47.000Z | 2020-01-13T16:02:47.000Z | from collections import namedtuple
import math
# NCHW shape record shared by all the shape-inference functions below.
TensorShape = namedtuple('TensorShape', ['batch_size', 'channels', 'height', 'width'])
def get_filter_output_shape(i_h, i_w, params, round_func):
    """Spatial (height, width) a kernel produces on an i_h x i_w input.

    params supplies padding (p_h/p_w), kernel size (k_h/k_w) and stride
    (s_h/s_w); round_func (math.floor or math.ceil) resolves fractional sizes.
    """
    out_h = 1 + (i_h + 2 * params.p_h - params.k_h) / float(params.s_h)
    out_w = 1 + (i_w + 2 * params.p_w - params.k_w) / float(params.s_w)
    return int(round_func(out_h)), int(round_func(out_w))
def get_strided_kernel_output_shape(node, round_func):
    """Output TensorShape for a layer that slides a kernel over its input."""
    assert node.layer is not None
    input_shape = node.get_only_parent()[0].output_shape
    o_h, o_w = get_filter_output_shape(input_shape.height, input_shape.width,
                                       node.kernel_parameters, round_func)
    # Layers with a num_output parameter (conv, deconv) set the channel
    # count; others (pooling) keep the input's channels.
    params = node.parameters
    if hasattr(params, 'num_output'):
        channels = params.num_output
    else:
        channels = input_shape.channels
    return TensorShape(input_shape.batch_size, channels, o_h, o_w)
def shape_not_implemented(node):
    # Placeholder for layer kinds whose output-shape inference is unsupported.
    raise NotImplementedError
def shape_identity(node):
    """Shape passes through unchanged from the node's first parent."""
    assert len(node.parents) > 0
    first_parent, _idx = node.parents[0]
    return first_parent.output_shape
def shape_scalar(node):
    # A single scalar value: batch 1, one channel, 1x1 spatial extent.
    return TensorShape(1, 1, 1, 1)
def shape_data(node):
    """Infer the shape of a data/input layer.

    Prefers an explicitly configured shape. Returns None (implicitly) when
    the shape would have to be read from a data source such as LMDB, which
    this code deliberately avoids.
    """
    if node.output_shape:
        # Old-style input specification
        return node.output_shape
    try:
        # New-style input specification
        return tuple(map(int, node.parameters.shape[0].dim))
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        # We most likely have a data layer on our hands. The problem is,
        # Caffe infers the dimensions of the data from the source (eg: LMDB).
        # We want to avoid reading datasets here. Fail for now.
        # This can be temporarily fixed by transforming the data layer to
        # Caffe's "input" layer (as is usually used in the "deploy" version).
        # TODO: Find a better solution for this.
        pass
def shape_mem_data(node):
    # MemoryData layer: the shape is given explicitly in the layer parameters.
    params = node.parameters
    return TensorShape(params.batch_size, params.channels, params.height, params.width)
def shape_concat(node):
    """Concatenate the parents' shapes along node.parameters.axis.

    All dimensions except `axis` are taken from the first parent; the
    `axis` dimension is the sum over all parents.
    """
    axis = node.parameters.axis
    shape = None
    for parent, _idx in node.parents:
        if shape is None:
            shape = list(parent.output_shape)
        else:
            shape[axis] += parent.output_shape[axis]
    return tuple(shape)
def shape_convolution(node):
    # Convolution rounds fractional output sizes down (floor).
    return get_strided_kernel_output_shape(node, math.floor)
def shape_pool(node):
    # Pooling rounds fractional output sizes up (ceil), unlike convolution.
    return get_strided_kernel_output_shape(node, math.ceil)
def shape_inner_product(node):
    """Fully connected layer: (batch, num_output, 1, 1)."""
    parent_shape = node.get_only_parent()[0].output_shape
    return TensorShape(parent_shape.batch_size, node.parameters.num_output, 1, 1)
35a0528618b14de6e3cba0fb508fcf4f28a2d36d | 338 | py | Python | command/sub8_missions/sub8_missions/ball_drop_test.py | ericgorday/SubjuGator | f45ac790f06eb97efc0b0810a7b43d0a6e2facee | [
"MIT"
] | 27 | 2020-02-17T21:54:09.000Z | 2022-03-18T17:49:23.000Z | command/sub8_missions/sub8_missions/ball_drop_test.py | ericgorday/SubjuGator | f45ac790f06eb97efc0b0810a7b43d0a6e2facee | [
"MIT"
] | 325 | 2019-09-11T14:13:56.000Z | 2022-03-31T00:38:30.000Z | command/sub8_missions/sub8_missions/ball_drop_test.py | ericgorday/SubjuGator | f45ac790f06eb97efc0b0810a7b43d0a6e2facee | [
"MIT"
] | 24 | 2019-09-16T00:29:45.000Z | 2022-03-06T10:56:38.000Z | #!/usr/bin/env python
from sub_singleton import SubjuGator
from txros import util
from twisted.internet import defer
class BallDropTest(SubjuGator):
    """Mission that commands the sub to release its ball-drop marker."""

    @util.cancellableInlineCallbacks
    def run(self, args):
        # args: mission arguments (unused here).
        self.send_feedback('Dropping Ball')
        # Fire the marker dropper and wait for the actuator call to finish.
        yield self.actuators.drop_marker()
        defer.returnValue('Success!')
| 26 | 43 | 0.736686 | 40 | 338 | 6.15 | 0.775 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.174556 | 338 | 12 | 44 | 28.166667 | 0.88172 | 0.059172 | 0 | 0 | 0 | 0 | 0.066246 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.333333 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
35a2f3c3d2b6590fa1da0dc507e357b3cef66d0c | 2,767 | py | Python | app/models.py | FrancisFlow/Flask_Blog_App | bfc5534b5fdbb7f51fd04d30581507390eeed3ae | [
"MIT"
] | null | null | null | app/models.py | FrancisFlow/Flask_Blog_App | bfc5534b5fdbb7f51fd04d30581507390eeed3ae | [
"MIT"
] | null | null | null | app/models.py | FrancisFlow/Flask_Blog_App | bfc5534b5fdbb7f51fd04d30581507390eeed3ae | [
"MIT"
] | null | null | null | from . import db
from flask_login import UserMixin, current_user
from werkzeug.security import generate_password_hash, check_password_hash
from datetime import datetime
from . import login_manager
@login_manager.user_loader
def load_user(user_id):
    # Flask-Login callback: reload a User from the id stored in the session.
    return User.query.get(int(user_id))
class User(UserMixin, db.Model):
    """An application user account.

    Passwords are stored only as Werkzeug hashes in pass_secure; the
    plain-text password is write-only via the `password` property.
    """

    __tablename__='users'
    id=db.Column(db.Integer, primary_key=True)
    username=db.Column(db.String(255))
    email=db.Column(db.String(255), unique=True, index=True)
    bio=db.Column(db.String(255))
    profile_pic_path=db.Column(db.String())
    # One-to-many: a user authors blogs and comments.
    blog=db.relationship('Blog', backref='user', lazy='dynamic')
    pass_secure=db.Column(db.String(255))
    comment=db.relationship('Comment', backref='user', lazy='dynamic')

    @property
    def password(self):
        # Reading the password back is intentionally forbidden.
        raise AttributeError('You cannot read the password attribute')

    @password.setter
    def password(self, password):
        self.pass_secure=generate_password_hash(password)

    def verify_password(self, password):
        # True when `password` matches the stored hash.
        return check_password_hash(self.pass_secure, password)
class Blog(db.Model):
    """A blog post written by a user."""

    __tablename__='blogs'
    id=db.Column(db.Integer, primary_key=True)
    title=db.Column(db.String(255), nullable=False)
    content=db.Column(db.String())
    posted_on = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
    user_id=db.Column(db.Integer, db.ForeignKey('users.id'))
    comment=db.relationship('Comment', backref='blog', lazy='dynamic')

    def save(self):
        """Persist this post to the database."""
        db.session.add(self)
        db.session.commit()

    def delete(self):
        """Remove this post from the database."""
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def get_blog(cls, id):
        """Return the blog post with the given id, or None.

        BUG FIX: the classmethod was declared as `def get_blog(id):` with no
        `cls`, so `Blog.get_blog(5)` bound the class itself to `id` and the
        real id was rejected as an unexpected argument.
        """
        return cls.query.filter_by(id=id).first()

    def __repr__(self):
        return f'Blog {self.title}, {self.posted_on}'
class Comment(db.Model):
    """A user comment attached to a blog post."""

    __tablename__='comments'
    id=db.Column(db.Integer, primary_key=True)
    comment=db.Column(db.String())
    time=db.Column(db.DateTime, default=datetime.utcnow)
    blog_id=db.Column(db.Integer, db.ForeignKey('blogs.id'))
    user_id=db.Column(db.Integer, db.ForeignKey('users.id'))

    def save(self):
        """Persist this comment to the database."""
        db.session.add(self)
        db.session.commit()

    def delete(self):
        """Remove this comment from the database.

        BUG FIX: previously called db.session.remove(self). Session.remove
        is a scoped-session teardown hook, not an object delete, and would
        raise rather than delete the row.
        """
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def get_comment(cls, id):
        """Return the comment with the given id, or None.

        BUG FIX: was `Comment.query.all(id=id)` — Query.all() accepts no
        keyword arguments and raised TypeError. Also made a proper
        classmethod (matching get_comments) so arguments bind correctly.
        """
        return cls.query.filter_by(id=id).first()

    @classmethod
    def get_comments(cls, id):
        """Return every comment on the blog post with the given id."""
        return cls.query.filter_by(blog_id=id).all()

    def __repr__(self):
        return f'Comment: {self.comment}'
class Quote:
    """A single quotation fetched from the external quotes API.

    Plain value object holding the quote text and its author.
    """

    def __init__(self, author, quote):
        self.author = author
        self.quote = quote
| 28.234694 | 79 | 0.673654 | 371 | 2,767 | 4.862534 | 0.250674 | 0.070953 | 0.088692 | 0.070953 | 0.313193 | 0.189579 | 0.189579 | 0.172395 | 0.117517 | 0.117517 | 0 | 0.006745 | 0.196241 | 2,767 | 97 | 80 | 28.525773 | 0.804406 | 0.006867 | 0 | 0.263889 | 1 | 0 | 0.069205 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.194444 | false | 0.125 | 0.069444 | 0.055556 | 0.722222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
35b48b731a829155eeb04244096c8ec7f43d82a2 | 152 | py | Python | 0x01-python-if_else_loops_functions/100-print_tebahpla.py | darkares23/holbertonschool-higher_level_programming | 931b1b701d8a1d990b7cd931486496c0b5502e21 | [
"MIT"
] | null | null | null | 0x01-python-if_else_loops_functions/100-print_tebahpla.py | darkares23/holbertonschool-higher_level_programming | 931b1b701d8a1d990b7cd931486496c0b5502e21 | [
"MIT"
] | null | null | null | 0x01-python-if_else_loops_functions/100-print_tebahpla.py | darkares23/holbertonschool-higher_level_programming | 931b1b701d8a1d990b7cd931486496c0b5502e21 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
for i in range(ord('z'), ord('a') - 1, -1):
if (i % 2 != 0):
i = i - 32
i = chr(i)
print("{}".format(i), end='')
| 21.714286 | 43 | 0.414474 | 27 | 152 | 2.333333 | 0.703704 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.065421 | 0.296053 | 152 | 6 | 44 | 25.333333 | 0.523364 | 0.111842 | 0 | 0 | 0 | 0 | 0.029851 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.2 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
35b8d9f65dac7b48897080eefd9ed11f4a9ea4bd | 385 | py | Python | SQLAlchemy/ORM/data/__all_models.py | jgraber/PythonFriday | 879f10934dc6949785e5a799bfc3ca9a2a4434d4 | [
"MIT"
] | 5 | 2021-02-22T08:39:55.000Z | 2022-03-14T03:54:36.000Z | SQLAlchemy/ORM/data/__all_models.py | jgraber/PythonFriday | 879f10934dc6949785e5a799bfc3ca9a2a4434d4 | [
"MIT"
] | null | null | null | SQLAlchemy/ORM/data/__all_models.py | jgraber/PythonFriday | 879f10934dc6949785e5a799bfc3ca9a2a4434d4 | [
"MIT"
] | null | null | null | # Add all your SQLAlchemy models here.
# This allows you to import just this file
# whenever you need to work with your models
# (like creating tables or for migrations)
from data.employee import Employee
from data.publisher import Publisher
from data.book_author import BookAuthor
from data.book import Book
from data.author import Author
from data.book_details import BookDetails
| 32.083333 | 45 | 0.812987 | 60 | 385 | 5.183333 | 0.533333 | 0.154341 | 0.115756 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153247 | 385 | 11 | 46 | 35 | 0.953988 | 0.423377 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
35d9953119cc8a1275a595e99ffd1de8a75316d1 | 1,005 | py | Python | spotpuppy/rotation/rotation_sensor_base.py | JoshPattman/Spot-Puppy-Lib | 90172c269ccaf7feefe55257606e0c519871a66d | [
"MIT"
] | 1 | 2021-11-16T13:24:16.000Z | 2021-11-16T13:24:16.000Z | spotpuppy/rotation/rotation_sensor_base.py | JoshPattman/spotpuppy | 90172c269ccaf7feefe55257606e0c519871a66d | [
"MIT"
] | null | null | null | spotpuppy/rotation/rotation_sensor_base.py | JoshPattman/spotpuppy | 90172c269ccaf7feefe55257606e0c519871a66d | [
"MIT"
] | null | null | null | import numpy as np
class sensor:
    """Base class for 2-axis rotation sensors.

    Stores the latest raw reading in `rotation` and applies the configured
    axis swap / inversions whenever the angle is read out. Subclasses
    override update() and calibrate().
    """

    def __init__(self, inverse_x=False, inverse_z=False):
        self.inverse_x = inverse_x
        self.inverse_z = inverse_z
        self.flip_x_z = False
        self.rotation = np.array([0, 0])

    def get_angle(self):
        """Return a copy of the rotation with flip/inversion applied."""
        angle = np.copy(self.rotation)
        if self.flip_x_z:
            angle[0], angle[1] = angle[1], angle[0]
        if self.inverse_x:
            angle[0] = -angle[0]
        if self.inverse_z:
            angle[1] = -angle[1]
        return angle

    def update(self):
        """Refresh self.rotation from hardware; override in subclasses."""
        pass

    def calibrate(self):
        """Calibrate the sensor; override in subclasses."""
        pass

    def get_json_params(self):
        """Serialise the axis configuration to a plain dict."""
        return {
            "inverse_x": self.inverse_x,
            "inverse_z": self.inverse_z,
            "flip_x_z": self.flip_x_z,
        }

    def set_json_params(self, d):
        """Restore the axis configuration from a dict made by get_json_params."""
        self.inverse_x = d['inverse_x']
        self.inverse_z = d['inverse_z']
        self.flip_x_z = d['flip_x_z']
| 25.769231 | 57 | 0.561194 | 146 | 1,005 | 3.609589 | 0.253425 | 0.187856 | 0.068311 | 0.075901 | 0.229602 | 0.068311 | 0 | 0 | 0 | 0 | 0 | 0.014881 | 0.331343 | 1,005 | 38 | 58 | 26.447368 | 0.769345 | 0.083582 | 0 | 0.068966 | 0 | 0 | 0.056645 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.206897 | false | 0.068966 | 0.034483 | 0.034483 | 0.344828 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
35e828b6d90c16ce9453d3cc5df2e776587b944f | 481 | py | Python | app/main/__init__.py | sonny-zhang/MyBlog | 880a80c5d95f472f0301f7380addc6c31d341b70 | [
"MIT"
] | null | null | null | app/main/__init__.py | sonny-zhang/MyBlog | 880a80c5d95f472f0301f7380addc6c31d341b70 | [
"MIT"
] | null | null | null | app/main/__init__.py | sonny-zhang/MyBlog | 880a80c5d95f472f0301f7380addc6c31d341b70 | [
"MIT"
] | null | null | null | # @Time : 2019/3/6 21:31
# @Author : sonny-zhang
# @FileName : __init__.py
# @Blog : http://www.cnblogs.com/1fengchen1/
from flask import Blueprint
#: 创建蓝图
main = Blueprint('main', __name__)
#: 初始化其他模块。必须放在创建蓝图的后面,否则导入包会包找不到main蓝图实例。(urls里导入了views,所以不用加载了)
from . import urls, errors
from app.models import Permission
@main.app_context_processor
def inject_permissions():
    """Expose the Permission class to all templates via Flask's context
    processor, so each view does not have to pass it explicitly."""
    return dict(Permission=Permission)
| 22.904762 | 65 | 0.72973 | 54 | 481 | 6.296296 | 0.814815 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02934 | 0.149688 | 481 | 20 | 66 | 24.05 | 0.801956 | 0.484407 | 0 | 0 | 0 | 0 | 0.017167 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.428571 | 0 | 0.714286 | 0.285714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
ea081e4ede1df780d5cc769a8571dbde38047d3f | 375 | py | Python | excel/xlrd&xlwt.py | chengchengXu/try_python | 45d4b450d4503ee37072a1471bceddbfb48601ae | [
"MIT"
] | null | null | null | excel/xlrd&xlwt.py | chengchengXu/try_python | 45d4b450d4503ee37072a1471bceddbfb48601ae | [
"MIT"
] | null | null | null | excel/xlrd&xlwt.py | chengchengXu/try_python | 45d4b450d4503ee37072a1471bceddbfb48601ae | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import xdrlib, sys
import xlrd
import xlwt
def open_excel(filename):
    """Open an Excel workbook, returning the xlrd Book or None on failure."""
    try:
        data = xlrd.open_workbook(filename)
        return data
    except Exception as e:
        # BUG FIX: was `except Exception(e):`, which is not valid exception
        # binding syntax; use `as e`.
        print(str(e))
        return None


def handle_excel(data):
    """Process an opened workbook.

    BUG FIX: the original `def handle_excel(data):` had no body at all (a
    syntax error). Kept as an explicit stub until processing logic exists.
    """
    pass


def main():
    filename = u".xlsx"
    data = open_excel(filename)
    handle_excel(data)


if __name__ == "__main__":
    # BUG FIX: was `def __name__=="__main__":`, a syntax error; the standard
    # script-entry guard is an `if` statement.
    main()
| 15.625 | 43 | 0.621333 | 48 | 375 | 4.583333 | 0.541667 | 0.081818 | 0.154545 | 0.163636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003571 | 0.253333 | 375 | 23 | 44 | 16.304348 | 0.782143 | 0.056 | 0 | 0 | 0 | 0 | 0.036932 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.1875 | null | null | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ea0ef11e7bbfe9c3c42fc9be9eef6b9c9443378f | 1,204 | py | Python | checkov/gitlab/runner.py | pmalkki/checkov | b6cdf386dd976fe27c16fed6d550756a678a5d7b | [
"Apache-2.0"
] | 1 | 2022-02-20T21:20:39.000Z | 2022-02-20T21:20:39.000Z | checkov/gitlab/runner.py | pmalkki/checkov | b6cdf386dd976fe27c16fed6d550756a678a5d7b | [
"Apache-2.0"
] | 3 | 2022-03-07T20:37:31.000Z | 2022-03-21T20:20:14.000Z | checkov/gitlab/runner.py | pmalkki/checkov | b6cdf386dd976fe27c16fed6d550756a678a5d7b | [
"Apache-2.0"
] | null | null | null | from checkov.common.output.report import CheckType
from checkov.gitlab.dal import Gitlab
from checkov.json_doc.runner import Runner as JsonRunner
from checkov.runner_filter import RunnerFilter
class Runner(JsonRunner):
    """Checkov runner that scans GitLab configuration fetched via the API."""

    check_type = CheckType.GITLAB_CONFIGURATION

    def __init__(self):
        self.gitlab = Gitlab()
        super().__init__()

    def run(self, root_folder=None, external_checks_dir=None, files=None,
            runner_filter=RunnerFilter(), collect_skip_comments=True):
        # NOTE(review): RunnerFilter() as a default argument is a shared
        # mutable default evaluated once at import — inherited from the
        # upstream runner signature; confirm before changing.
        self.prepare_data()

        # Scan the persisted GitLab config directory with the JSON runner.
        report = super().run(root_folder=self.gitlab.gitlab_conf_dir_path, external_checks_dir=external_checks_dir,
                             files=files,
                             runner_filter=runner_filter, collect_skip_comments=collect_skip_comments)
        JsonRunner._change_files_path_to_relative(report)

        return report

    def prepare_data(self):
        # Download and persist all GitLab configuration files before scanning.
        self.gitlab.persist_all_confs()

    def require_external_checks(self):
        # The default JSON runner requires external checks only; the GitLab
        # runner ships with built-in checks, so external ones are optional.
        return False

    def import_registry(self):
        from checkov.gitlab.registry import registry
        return registry
| 35.411765 | 115 | 0.712625 | 145 | 1,204 | 5.613793 | 0.37931 | 0.067568 | 0.062654 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.22093 | 1,204 | 33 | 116 | 36.484848 | 0.867804 | 0.071429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.208333 | false | 0 | 0.25 | 0.041667 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
ea1f3e0b858f021f7db57a9435af25708e8e42a7 | 443 | py | Python | ape_kong/utils.py | unparalleled-js/ape-kong | 4acf792d64a057f74b61fb7342e5447b0aae83ec | [
"MIT"
] | null | null | null | ape_kong/utils.py | unparalleled-js/ape-kong | 4acf792d64a057f74b61fb7342e5447b0aae83ec | [
"MIT"
] | null | null | null | ape_kong/utils.py | unparalleled-js/ape-kong | 4acf792d64a057f74b61fb7342e5447b0aae83ec | [
"MIT"
] | null | null | null | from typing import Tuple
def get_clear_space(original_y, new_y, original_x, new_x, text) -> Tuple[int, int]:
    """Return (start_x, length) of the region to blank after `text` moved.

    The y-coordinates are accepted for interface compatibility but do not
    influence the result; only horizontal movement matters.
    """
    if original_x < new_x:
        # Moved right: clear the columns the text vacated on the left.
        return original_x, new_x - original_x
    if original_x > new_x:
        # Moved left: clear what the text no longer covers on the right.
        return new_x + len(text), original_x - new_x
    # No horizontal movement: clear the text's own footprint.
    return original_x, len(text)
| 22.15 | 83 | 0.609481 | 66 | 443 | 3.772727 | 0.363636 | 0.253012 | 0.100402 | 0.208835 | 0.313253 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.316027 | 443 | 19 | 84 | 23.315789 | 0.821782 | 0.049661 | 0 | 0.166667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.083333 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ea2e0f76876cbb12cf5a04a3803fa9d6e675ae5c | 467 | py | Python | server/fire_watch/views.py | Aradhya-Tripathi/free-watch | c3353c0eec8d545372f22d6ac437ed71c1940f70 | [
"MIT"
] | 5 | 2021-10-09T09:36:12.000Z | 2021-12-14T17:03:31.000Z | server/fire_watch/views.py | Aradhya-Tripathi/free-watch | c3353c0eec8d545372f22d6ac437ed71c1940f70 | [
"MIT"
] | 8 | 2021-10-04T07:12:11.000Z | 2021-12-13T18:19:35.000Z | server/fire_watch/views.py | Aradhya-Tripathi/free-watch | c3353c0eec8d545372f22d6ac437ed71c1940f70 | [
"MIT"
] | 1 | 2021-12-17T09:12:49.000Z | 2021-12-17T09:12:49.000Z | import os
import time
import psutil
from apis.views import JsonResponse, Throttle
from apis.views.decorators import api_view
@api_view(["GET"], [Throttle])
def health_check(request) -> JsonResponse:
"""Health check route
Args:
request (request): Request
Returns:
JsonResponse: Uptime
"""
uptime = time.time() - psutil.Process(os.getpid()).create_time()
return JsonResponse(data={"uptime": uptime, "OK": True}, status=200)
| 22.238095 | 72 | 0.685225 | 56 | 467 | 5.642857 | 0.535714 | 0.050633 | 0.082278 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007916 | 0.188437 | 467 | 20 | 73 | 23.35 | 0.825858 | 0.194861 | 0 | 0 | 0 | 0 | 0.031519 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.555556 | 0 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
ea478e7def100b4888c4259b27641626eff18ca7 | 1,719 | py | Python | tsx/api/api.py | nesp-tsr/tsx | 99125db6e81652592cf4b3e8a89f8c8ac8de64af | [
"MIT"
] | 3 | 2019-01-27T12:03:46.000Z | 2022-01-29T02:06:33.000Z | tsx/api/api.py | nesp-tsr/tsx | 99125db6e81652592cf4b3e8a89f8c8ac8de64af | [
"MIT"
] | 27 | 2019-02-25T22:49:14.000Z | 2022-02-02T04:07:52.000Z | tsx/api/api.py | nesp-tsr/tsx | 99125db6e81652592cf4b3e8a89f8c8ac8de64af | [
"MIT"
] | 1 | 2019-02-28T04:24:43.000Z | 2019-02-28T04:24:43.000Z | from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash, json
import tsx.api.lpi_data
from tsx.api.util import setup_db
import tsx.config
import uuid
from flask_cors import CORS
from flask_session import Session
import os.path
import tsx.api.upload
import tsx.api.lpi_data
import tsx.api.data_import
import tsx.api.misc
import tsx.api.user
import tsx.api.program_manager
import tsx.api.subset
import datetime
app = Flask('tsx')
app.config['UPLOAD_DIR'] = tsx.config.data_dir("upload")

# Enable CORS (credentialed cross-origin requests)
CORS(app=app, supports_credentials=True)
# CORS(app=app, send_wildcard=True)

# Setup secret key (falls back to an insecure default when unconfigured)
app.secret_key = tsx.config.get("api", "secret_key") or "not-secret"
# app.config['SECRET_KEY'] = tsx.config.get("api", "secret_key") or "not-secret"
# Server-side sessions stored on the filesystem
app.config['SESSION_TYPE']='filesystem'
Session(app)

setup_db(app)

# Register every API module's blueprint on the app
app.register_blueprint(tsx.api.upload.bp)
app.register_blueprint(tsx.api.lpi_data.bp)
app.register_blueprint(tsx.api.data_import.bp)
app.register_blueprint(tsx.api.misc.bp)
app.register_blueprint(tsx.api.user.bp)
app.register_blueprint(tsx.api.program_manager.bp)
app.register_blueprint(tsx.api.subset.bp)
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder handling datetime, date, time and timedelta values.

    datetimes are ISO strings with a trailing 'Z'; dates/times are plain
    ISO strings; timedeltas render as a time-of-day offset from midnight.
    """

    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            # Checked before date: datetime is a date subclass.
            return obj.isoformat() + 'Z'
        if isinstance(obj, (datetime.date, datetime.time)):
            return obj.isoformat()
        if isinstance(obj, datetime.timedelta):
            return (datetime.datetime.min + obj).time().isoformat()
        return super(DateTimeEncoder, self).default(obj)
# Serialise datetime values in every JSON response with the custom encoder.
app.json_encoder = DateTimeEncoder

# @app.before_first_request
# def app_init():
#     tsx.api.data_import.start_processing_workers()
#     tsx.api.data_import.process_unprocessed()
| 27.725806 | 100 | 0.777196 | 263 | 1,719 | 4.935361 | 0.296578 | 0.083205 | 0.07396 | 0.124037 | 0.260401 | 0.211094 | 0.081664 | 0.081664 | 0.081664 | 0.081664 | 0 | 0 | 0.094823 | 1,719 | 61 | 101 | 28.180328 | 0.83419 | 0.159395 | 0 | 0.05 | 0 | 0 | 0.045265 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025 | false | 0 | 0.425 | 0 | 0.575 | 0.175 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
ea59ac372a9f19f93301b890428a0430eb972164 | 884 | py | Python | azext_concierge/__init__.py | egineering-llc/azure-cli-concierge-extension | e6a61ff1bf4b355aae417c3d4dd3d7508fdff502 | [
"MIT"
] | 1 | 2019-12-20T17:49:17.000Z | 2019-12-20T17:49:17.000Z | azext_concierge/__init__.py | egineering-llc/azure-cli-concierge-extension | e6a61ff1bf4b355aae417c3d4dd3d7508fdff502 | [
"MIT"
] | null | null | null | azext_concierge/__init__.py | egineering-llc/azure-cli-concierge-extension | e6a61ff1bf4b355aae417c3d4dd3d7508fdff502 | [
"MIT"
] | null | null | null | from knack.help_files import helps
from azure.cli.core import AzCommandsLoader
class ConciergeCommandsLoader(AzCommandsLoader):
    """Azure CLI command loader for the concierge extension."""

    def __init__(self, cli_ctx=None):
        from azure.cli.core.commands import CliCommandType
        # Commands resolve their operations from this extension's package.
        custom_type = CliCommandType(operations_tmpl='azext_concierge#{}')
        super(ConciergeCommandsLoader, self).__init__(cli_ctx=cli_ctx,
                                                      custom_command_type=custom_type)

    def load_command_table(self, args):
        # Register the extension's commands on the CLI command table.
        from azext_concierge.concierge.general.commands import load_general_commands
        load_general_commands(self, args)
        return self.command_table

    def load_arguments(self, command):
        # Register argument definitions for the extension's commands.
        from azext_concierge.concierge.general.arguments import load_general_arguments
        load_general_arguments(self, command)


# Entry point the Azure CLI looks up to load this extension.
COMMAND_LOADER_CLS = ConciergeCommandsLoader
| 36.833333 | 87 | 0.730769 | 97 | 884 | 6.309278 | 0.360825 | 0.071895 | 0.039216 | 0.052288 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.208145 | 884 | 23 | 88 | 38.434783 | 0.874286 | 0 | 0 | 0 | 0 | 0 | 0.020362 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1875 | false | 0 | 0.3125 | 0 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
ea6a6aa44c90858863767cb880ca639fd2316449 | 235 | py | Python | QuickStarts/Visualization.py | nj-czy/UCTB | bddb8b47953bef1f44cb06f1a57a3d7efbd31c3a | [
"MIT"
] | 28 | 2020-02-28T03:16:43.000Z | 2022-03-31T07:24:47.000Z | QuickStarts/Visualization.py | nj-czy/UCTB | bddb8b47953bef1f44cb06f1a57a3d7efbd31c3a | [
"MIT"
] | 8 | 2020-06-30T09:34:56.000Z | 2022-01-17T12:20:28.000Z | QuickStarts/Visualization.py | nj-czy/UCTB | bddb8b47953bef1f44cb06f1a57a3d7efbd31c3a | [
"MIT"
] | 13 | 2020-06-04T09:47:36.000Z | 2022-02-25T09:50:52.000Z | from UCTB.dataset import NodeTrafficLoader
#from UCTB.utils import st_map
from dateutil.parser import parse
# Configure the data loader: NYC bike-share demand, no Lagrangian mask.
data_loader = NodeTrafficLoader(dataset='Bike', city='NYC', with_lm=False)
data_loader.st_map()
ea6f623c6f3c46a55e00fac3ec0c49a420ce129f | 128 | py | Python | src/genie/libs/parser/generic/tests/ShowVersion/cli/equal/golden_output_iosxe_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/generic/tests/ShowVersion/cli/equal/golden_output_iosxe_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/generic/tests/ShowVersion/cli/equal/golden_output_iosxe_2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | null | null | null | expected_output = {
'model': 'WS-C3650-48PD',
'os': 'iosxe',
'platform': 'cat3k_caa',
'version': '03.06.07E',
}
| 18.285714 | 29 | 0.539063 | 15 | 128 | 4.466667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.13 | 0.21875 | 128 | 6 | 30 | 21.333333 | 0.54 | 0 | 0 | 0 | 0 | 0 | 0.453125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ea7e251ad9a592e0a63ce82ecd0609eacd0487ff | 780 | py | Python | test/fasta/test.py | PapenfussLab/Mungo | 02c5b0e48ecd28596cb9481b282753859f47fed6 | [
"Artistic-2.0"
] | 1 | 2015-09-16T07:53:18.000Z | 2015-09-16T07:53:18.000Z | test/fasta/test.py | PapenfussLab/Mungo | 02c5b0e48ecd28596cb9481b282753859f47fed6 | [
"Artistic-2.0"
] | null | null | null | test/fasta/test.py | PapenfussLab/Mungo | 02c5b0e48ecd28596cb9481b282753859f47fed6 | [
"Artistic-2.0"
] | 3 | 2016-01-02T16:34:59.000Z | 2021-07-21T04:21:55.000Z | #!/usr/bin/env python
"""
test.py
Author: Tony Papenfuss
Date: Tue Mar 18 14:27:10 EST 2008
"""
import os, sys
def rule():
    # Print a horizontal separator line (Python 2 print statement).
    print '-'*80

from fasta import FastaFile

# --- Sequential-reading interface ---------------------------------------
f = FastaFile('HSP100.fa')

rule()
print 'readOne'
print f.readOne()
print f.readOne()

rule()
print 'readAll'
print f.readAll()

rule()
print 'Mapping'
f.asMapping()
print f
rule()

# --- Indexed container interface ----------------------------------------
rule()
f = FastaFile('HSP100.fa', indexed=True, interface='container')

print "Iteration"
i = 0
for h,s in f:
    print h,s
    i += 1
    # Stop after four records to demonstrate resumable iteration below.
    if i==4: break
print '---'
for h,s in f:
    print h,s

rule()
f.reset()

print "Slicing"
print " Indexed"
print f[0]
print
print f[2]
print
print f.readOne()

rule()
print " Slicing"
print f[5:8]
print
print f[0:2]

rule()
print "As mapping"
f.asMapping()
print f['5']
print
rule()
| 10.985915 | 63 | 0.646154 | 133 | 780 | 3.789474 | 0.421053 | 0.119048 | 0.077381 | 0.071429 | 0.238095 | 0.059524 | 0.059524 | 0.059524 | 0 | 0 | 0 | 0.047468 | 0.189744 | 780 | 70 | 64 | 11.142857 | 0.75 | 0.025641 | 0 | 0.5 | 0 | 0 | 0.138889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.041667 | null | null | 0.541667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
ea7e7084b7ead4b62b0f9649b9df31b235ad673b | 545 | py | Python | fahari/ops/migrations/0005_auto_20210727_1306.py | savannahghi/mle | 4d65f8a573935d39700a0d6f8a8eb79cb5640288 | [
"MIT"
] | 2 | 2021-09-14T07:14:27.000Z | 2022-01-06T12:21:34.000Z | fahari/ops/migrations/0005_auto_20210727_1306.py | savannahghi/fahari | 4d65f8a573935d39700a0d6f8a8eb79cb5640288 | [
"MIT"
] | 1 | 2022-02-27T18:03:17.000Z | 2022-02-27T18:03:17.000Z | fahari/ops/migrations/0005_auto_20210727_1306.py | savannahghi/mle | 4d65f8a573935d39700a0d6f8a8eb79cb5640288 | [
"MIT"
] | 2 | 2022-03-12T13:25:09.000Z | 2022-03-21T08:44:33.000Z | # Generated by Django 3.2.5 on 2021-07-27 10:06
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations; edits here should be limited
    # to comments.  This migration only changes model Meta ordering options
    # (no schema/database changes are made by AlterModelOptions).

    dependencies = [
        ('ops', '0004_stockreceiptverification'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='activitylog',
            # Most recent planned/requested/procurement dates first.
            options={'ordering': ('-planned_date', '-requested_date', '-procurement_date')},
        ),
        migrations.AlterModelOptions(
            name='dailyupdate',
            # Newest date first, ties broken alphabetically by facility name.
            options={'ordering': ('-date', 'facility__name')},
        ),
    ]
| 24.772727 | 92 | 0.592661 | 47 | 545 | 6.744681 | 0.723404 | 0.170347 | 0.195584 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.047619 | 0.26789 | 545 | 21 | 93 | 25.952381 | 0.746867 | 0.082569 | 0 | 0.266667 | 1 | 0 | 0.269076 | 0.058233 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.066667 | 0 | 0.266667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ea8a7a813169b384400d66c809dd050fb7b25601 | 22,941 | py | Python | pysnmp-with-texts/DKSF-54-1-X-X-1.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/DKSF-54-1-X-X-1.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/DKSF-54-1-X-X-1.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module DKSF-54-1-X-X-1 (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DKSF-54-1-X-X-1
# Produced by pysmi-0.3.4 at Wed May 1 12:47:27 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: this module is auto-generated by pysmi from the DKSF-54-1-X-X-1 ASN.1
# MIB (NetPing 4PWR-220/SMS device).  `mibBuilder` is injected by the pysnmp
# MIB loader's exec() context rather than imported, so this file is not
# importable as a normal Python module.  Do not hand-edit the generated
# statements; regenerate from the MIB source instead.

# Symbols resolved through the MIB builder (MIB-level imports, not Python imports).
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
snmpTraps, = mibBuilder.importSymbols("SNMPv2-MIB", "snmpTraps")
Unsigned32, Gauge32, Bits, ModuleIdentity, ObjectIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, IpAddress, iso, Integer32, Counter32, Counter64, NotificationType, enterprises = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "Gauge32", "Bits", "ModuleIdentity", "ObjectIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "IpAddress", "iso", "Integer32", "Counter32", "Counter64", "NotificationType", "enterprises")
TextualConvention, DisplayString, TruthValue, TimeStamp = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue", "TimeStamp")

# Module identity and revision history (enterprise OID 25728 = NetPing/Alentis).
netPing4Pwr = ModuleIdentity((1, 3, 6, 1, 4, 1, 25728, 54))
netPing4Pwr.setRevisions(('2015-03-02 00:00', '2014-06-19 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: netPing4Pwr.setRevisionsDescriptions(('npRelHumidity branch added npGsmSendSms variable added npRelayMode values redefined', 'Initial release',))
if mibBuilder.loadTexts: netPing4Pwr.setLastUpdated('201503020000Z')
if mibBuilder.loadTexts: netPing4Pwr.setOrganization('Alentis Electronics')
if mibBuilder.loadTexts: netPing4Pwr.setContactInfo('developers@netping.ru')
if mibBuilder.loadTexts: netPing4Pwr.setDescription('MIB for NetPing 4PWR-220/SMS remote sensing and control')
lightcom = MibIdentifier((1, 3, 6, 1, 4, 1, 25728))

# --- npRelay: outlet/relay control table (OID ...25728.5500) ----------------
npRelay = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 5500))
npRelayTable = MibTable((1, 3, 6, 1, 4, 1, 25728, 5500, 5), )
if mibBuilder.loadTexts: npRelayTable.setStatus('current')
if mibBuilder.loadTexts: npRelayTable.setDescription('Watchdog and outlet/relay control table')
npRelayEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25728, 5500, 5, 1), ).setIndexNames((0, "DKSF-54-1-X-X-1", "npRelayN"))
if mibBuilder.loadTexts: npRelayEntry.setStatus('current')
if mibBuilder.loadTexts: npRelayEntry.setDescription('Relay/outlet table row')
npRelayN = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5500, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelayN.setStatus('current')
if mibBuilder.loadTexts: npRelayN.setDescription('The N of output relay')
npRelayMode = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5500, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-1, 0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("flip", -1), ("off", 0), ("on", 1), ("watchdog", 2), ("schedule", 3), ("scheduleAndWatchdog", 4), ("logic", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: npRelayMode.setStatus('current')
if mibBuilder.loadTexts: npRelayMode.setDescription('Control of relay: -1 - flip between on(1) and off(0) 0 - manual off 1 - manual on 2 - watchdog 3 - schedule 4 - both schedule and watchdog (while switched on by schedule) 5 - logic')
npRelayStartReset = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5500, 5, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: npRelayStartReset.setStatus('current')
if mibBuilder.loadTexts: npRelayStartReset.setDescription('Write 1 to start reset (switch relay off for some time)')
npRelayMemo = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5500, 5, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelayMemo.setStatus('current')
if mibBuilder.loadTexts: npRelayMemo.setDescription('Relay memo')
npRelayFlip = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5500, 5, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-1))).clone(namedValues=NamedValues(("flip", -1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelayFlip.setStatus('current')
if mibBuilder.loadTexts: npRelayFlip.setDescription('Write -1 to flip between manual on and manual off states of relay')
npRelayState = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5500, 5, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelayState.setStatus('current')
if mibBuilder.loadTexts: npRelayState.setDescription('Actual relay state at the moment, regardless of source of control. 0 - relay is off 1 - relay is on')
npRelayPowered = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5500, 5, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelayPowered.setStatus('current')
if mibBuilder.loadTexts: npRelayPowered.setDescription('AC presence on output (relay operation check) 0 - no AC on output socket 1 - AC is present on oputput')

# --- npPwr: watchdog / power channel table (OID ...25728.5800) --------------
npPwr = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 5800))
npPwrTable = MibTable((1, 3, 6, 1, 4, 1, 25728, 5800, 3), )
if mibBuilder.loadTexts: npPwrTable.setStatus('current')
if mibBuilder.loadTexts: npPwrTable.setDescription('Watchdog and outlet/relay control table')
npPwrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25728, 5800, 3, 1), ).setIndexNames((0, "DKSF-54-1-X-X-1", "npPwrChannelN"))
if mibBuilder.loadTexts: npPwrEntry.setStatus('current')
if mibBuilder.loadTexts: npPwrEntry.setDescription('Watchdog control table row')
npPwrChannelN = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5800, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npPwrChannelN.setStatus('current')
if mibBuilder.loadTexts: npPwrChannelN.setDescription('The id of watchdog/power channel')
npPwrStartReset = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5800, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npPwrStartReset.setStatus('obsolete')
if mibBuilder.loadTexts: npPwrStartReset.setDescription('Deprecated in current FW version: Write 1 to start forced reset. On read: 0 - normal operation 1 - reset is active 2 - reboot pause is active or watchdog is inactive')
npPwrResetsCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5800, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: npPwrResetsCounter.setStatus('current')
if mibBuilder.loadTexts: npPwrResetsCounter.setDescription('Counter of watchdog resets Write 0 to clear.')
npPwrRepeatingResetsCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5800, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npPwrRepeatingResetsCounter.setStatus('current')
if mibBuilder.loadTexts: npPwrRepeatingResetsCounter.setDescription('Counter of continous failed watchdog resets')
npPwrMemo = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 5800, 3, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: npPwrMemo.setStatus('current')
if mibBuilder.loadTexts: npPwrMemo.setDescription('Watchdog channel memo')

# --- npThermo: temperature sensor table and trap objects (OID ...25728.8800) -
npThermo = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 8800))
npThermoTable = MibTable((1, 3, 6, 1, 4, 1, 25728, 8800, 1), )
if mibBuilder.loadTexts: npThermoTable.setStatus('current')
if mibBuilder.loadTexts: npThermoTable.setDescription('Thermo Sensors Table')
npThermoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25728, 8800, 1, 1), ).setIndexNames((0, "DKSF-54-1-X-X-1", "npThermoSensorN"))
if mibBuilder.loadTexts: npThermoEntry.setStatus('current')
if mibBuilder.loadTexts: npThermoEntry.setDescription('Thermo Sensors Table Row')
npThermoSensorN = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 8800, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoSensorN.setStatus('current')
if mibBuilder.loadTexts: npThermoSensorN.setDescription('The id of temperature sensor, 1 to 8')
npThermoValue = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 8800, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-60, 280))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoValue.setStatus('current')
if mibBuilder.loadTexts: npThermoValue.setDescription('Temperature, deg.C')
npThermoStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 8800, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("failed", 0), ("low", 1), ("norm", 2), ("high", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoStatus.setStatus('current')
if mibBuilder.loadTexts: npThermoStatus.setDescription('Temperature status (0=fault, 1=underheat, 2=normal, 3=overheat)')
npThermoLow = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 8800, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-60, 280))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoLow.setStatus('current')
if mibBuilder.loadTexts: npThermoLow.setDescription('Bottom margin of normal temperature range, deg.C')
npThermoHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 8800, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-60, 280))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoHigh.setStatus('current')
if mibBuilder.loadTexts: npThermoHigh.setDescription('Top margin of normal temperature range, deg.C')
npThermoMemo = MibTableColumn((1, 3, 6, 1, 4, 1, 25728, 8800, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoMemo.setStatus('current')
if mibBuilder.loadTexts: npThermoMemo.setDescription('T channel memo')
# Scalar mirrors of the table columns, bound as variables of npThermoTrap.
npThermoTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 8800, 2))
npThermoTrapPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 8800, 2, 0))
npThermoTrapSensorN = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8800, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoTrapSensorN.setStatus('current')
if mibBuilder.loadTexts: npThermoTrapSensorN.setDescription('The id of temperature sensor, 1 to 8')
npThermoTrapValue = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8800, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-60, 280))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoTrapValue.setStatus('current')
if mibBuilder.loadTexts: npThermoTrapValue.setDescription('Temperature, deg.C')
npThermoTrapStatus = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8800, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("failed", 0), ("low", 1), ("norm", 2), ("high", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoTrapStatus.setStatus('current')
if mibBuilder.loadTexts: npThermoTrapStatus.setDescription('Temperature status (0=fault, 1=underheat, 2=normal, 3=overheat)')
npThermoTrapLow = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8800, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-60, 280))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoTrapLow.setStatus('current')
if mibBuilder.loadTexts: npThermoTrapLow.setDescription('Bottom margin of normal temperature range, deg.C')
npThermoTrapHigh = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8800, 2, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-60, 280))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoTrapHigh.setStatus('current')
if mibBuilder.loadTexts: npThermoTrapHigh.setDescription('Top margin of normal temperature range, deg.C')
npThermoTrapMemo = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8800, 2, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: npThermoTrapMemo.setStatus('current')
if mibBuilder.loadTexts: npThermoTrapMemo.setDescription('T channel memo')
npThermoTrap = NotificationType((1, 3, 6, 1, 4, 1, 25728, 8800, 2, 0, 1)).setObjects(("DKSF-54-1-X-X-1", "npThermoTrapSensorN"), ("DKSF-54-1-X-X-1", "npThermoTrapValue"), ("DKSF-54-1-X-X-1", "npThermoTrapStatus"), ("DKSF-54-1-X-X-1", "npThermoTrapLow"), ("DKSF-54-1-X-X-1", "npThermoTrapHigh"), ("DKSF-54-1-X-X-1", "npThermoTrapMemo"))
if mibBuilder.loadTexts: npThermoTrap.setStatus('current')
if mibBuilder.loadTexts: npThermoTrap.setDescription('Status of Thermo sensor is changed (crossing of normal temp. range)')

# --- npRelHumidity: relative-humidity sensor and trap (OID ...25728.8400) ---
npRelHumidity = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 8400))
npRelHumSensor = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 8400, 2))
npRelHumSensorStatus = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8400, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("error", 0), ("ok", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelHumSensorStatus.setStatus('current')
if mibBuilder.loadTexts: npRelHumSensorStatus.setDescription("Status of the Rel.Humidity Sensor One 1=Normal, 0=Error or Sensor isn't connected")
npRelHumSensorValueH = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8400, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelHumSensorValueH.setStatus('current')
if mibBuilder.loadTexts: npRelHumSensorValueH.setDescription('Relative humidity value, %')
npRelHumSensorValueT = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8400, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-60, 200))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelHumSensorValueT.setStatus('current')
if mibBuilder.loadTexts: npRelHumSensorValueT.setDescription('Sensor temperature, deg.C')
npRelHumSensorStatusH = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8400, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("sensorFailed", 0), ("belowSafeRange", 1), ("inSafeRange", 2), ("aboveSafeRange", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelHumSensorStatusH.setStatus('current')
if mibBuilder.loadTexts: npRelHumSensorStatusH.setDescription('Status of Relative Humiduty')
npRelHumSafeRangeHigh = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8400, 2, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelHumSafeRangeHigh.setStatus('current')
if mibBuilder.loadTexts: npRelHumSafeRangeHigh.setDescription('Relative Humidity safe range, top margin, %RH')
npRelHumSafeRangeLow = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8400, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelHumSafeRangeLow.setStatus('current')
if mibBuilder.loadTexts: npRelHumSafeRangeLow.setDescription('Relative Humidity safe range, bottom margin, %RH')
npRelHumSensorValueT100 = MibScalar((1, 3, 6, 1, 4, 1, 25728, 8400, 2, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: npRelHumSensorValueT100.setStatus('current')
if mibBuilder.loadTexts: npRelHumSensorValueT100.setDescription('Sensor temperature, deg.C * 100 (fixed point two decimal places) Used to get access to the fractional part of T value')
npRelHumTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 8400, 9))
npRelHumTrapPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 8400, 9, 0))
npRelHumTrap = NotificationType((1, 3, 6, 1, 4, 1, 25728, 8400, 9, 0, 1)).setObjects(("DKSF-54-1-X-X-1", "npRelHumSensorStatusH"), ("DKSF-54-1-X-X-1", "npRelHumSensorValueH"), ("DKSF-54-1-X-X-1", "npRelHumSafeRangeHigh"), ("DKSF-54-1-X-X-1", "npRelHumSafeRangeLow"))
if mibBuilder.loadTexts: npRelHumTrap.setStatus('current')
if mibBuilder.loadTexts: npRelHumTrap.setDescription('Status of Relative Humidity RH sensor has changed!')

# --- npGsm: GSM modem status, SMS sending and trap (OID ...25728.3800) ------
npGsm = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 3800))
npGsmInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 3800, 1))
npGsmFailed = MibScalar((1, 3, 6, 1, 4, 1, 25728, 3800, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("ok", 0), ("failed", 1), ("fatalError", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npGsmFailed.setStatus('current')
if mibBuilder.loadTexts: npGsmFailed.setDescription("Firmware's GSM module status")
npGsmRegistration = MibScalar((1, 3, 6, 1, 4, 1, 25728, 3800, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 255))).clone(namedValues=NamedValues(("impossible", 0), ("homeNetwork", 1), ("searching", 2), ("denied", 3), ("unknown", 4), ("roaming", 5), ("infoUpdate", 255)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npGsmRegistration.setStatus('current')
if mibBuilder.loadTexts: npGsmRegistration.setDescription('Status of modem registration in GSM network (AT+CREG? result)')
npGsmStrength = MibScalar((1, 3, 6, 1, 4, 1, 25728, 3800, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npGsmStrength.setStatus('current')
if mibBuilder.loadTexts: npGsmStrength.setDescription('GSM signal strength. 0..31 = 0..100%, 99 = unknown or n/a, 255 = updating info')
npGsmSendSms = MibScalar((1, 3, 6, 1, 4, 1, 25728, 3800, 1, 9), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: npGsmSendSms.setStatus('current')
if mibBuilder.loadTexts: npGsmSendSms.setDescription('Send arbitrary SMS. Format: [phone_number,phone_number,...] Message One to four destination phone numbers If [] and numbers omitted, mesagge will be sent to preset numbers from SMS setup Only Latin characters allowed in message body')
npGsmTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 3800, 2))
npGsmTrapPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 3800, 2, 0))
npGsmTrap = NotificationType((1, 3, 6, 1, 4, 1, 25728, 3800, 2, 0, 1)).setObjects(("DKSF-54-1-X-X-1", "npGsmFailed"), ("DKSF-54-1-X-X-1", "npGsmRegistration"), ("DKSF-54-1-X-X-1", "npGsmStrength"))
if mibBuilder.loadTexts: npGsmTrap.setStatus('current')
if mibBuilder.loadTexts: npGsmTrap.setDescription('GSM modem or SMS firmware problems')

# --- npBattery: power source / charge status (OID ...25728.3900) ------------
npBattery = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 3900))
npBatteryInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 3900, 1))
npBatteryPok = MibScalar((1, 3, 6, 1, 4, 1, 25728, 3900, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("batteryPower", 0), ("externalPower", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npBatteryPok.setStatus('current')
if mibBuilder.loadTexts: npBatteryPok.setDescription('Power source')
npBatteryLevel = MibScalar((1, 3, 6, 1, 4, 1, 25728, 3900, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npBatteryLevel.setStatus('current')
if mibBuilder.loadTexts: npBatteryLevel.setDescription('Battery charge, approximate value, in percent. Valid only if npBatteryPok = 0')
npBatteryChg = MibScalar((1, 3, 6, 1, 4, 1, 25728, 3900, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("batteryChargingSuspended", 0), ("batteryFastCharging", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: npBatteryChg.setStatus('current')
if mibBuilder.loadTexts: npBatteryChg.setDescription('Battery chargeing status. 0 if charging suspended or battery is full, 1 while LiPo fast charging.')

# --- npReboot: device reboot controls (OID ...25728.911) --------------------
npReboot = MibIdentifier((1, 3, 6, 1, 4, 1, 25728, 911))
npSoftReboot = MibScalar((1, 3, 6, 1, 4, 1, 25728, 911, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: npSoftReboot.setStatus('current')
if mibBuilder.loadTexts: npSoftReboot.setDescription('Write 1 to reboot device after current operations completition')
npResetStack = MibScalar((1, 3, 6, 1, 4, 1, 25728, 911, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: npResetStack.setStatus('current')
if mibBuilder.loadTexts: npResetStack.setDescription('Write 1 to re-initialize network stack')
npForcedReboot = MibScalar((1, 3, 6, 1, 4, 1, 25728, 911, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: npForcedReboot.setStatus('current')
if mibBuilder.loadTexts: npForcedReboot.setDescription('Write 1 to immediate forced reboot')

# Export every symbol so other compiled MIB modules can resolve them by name.
mibBuilder.exportSymbols("DKSF-54-1-X-X-1", npThermoSensorN=npThermoSensorN, npBatteryLevel=npBatteryLevel, npPwrTable=npPwrTable, npThermoTrapPrefix=npThermoTrapPrefix, netPing4Pwr=netPing4Pwr, npThermo=npThermo, npThermoTrapSensorN=npThermoTrapSensorN, npPwrRepeatingResetsCounter=npPwrRepeatingResetsCounter, npRelHumidity=npRelHumidity, npBattery=npBattery, npSoftReboot=npSoftReboot, npGsmSendSms=npGsmSendSms, npRelayTable=npRelayTable, PYSNMP_MODULE_ID=netPing4Pwr, npRelayStartReset=npRelayStartReset, npResetStack=npResetStack, npThermoTrap=npThermoTrap, npGsmStrength=npGsmStrength, npThermoTraps=npThermoTraps, npThermoStatus=npThermoStatus, npThermoTrapHigh=npThermoTrapHigh, npPwrResetsCounter=npPwrResetsCounter, npThermoTrapLow=npThermoTrapLow, npRelHumSensorValueT100=npRelHumSensorValueT100, npForcedReboot=npForcedReboot, npRelHumTraps=npRelHumTraps, npPwr=npPwr, npRelHumSafeRangeLow=npRelHumSafeRangeLow, npThermoLow=npThermoLow, npThermoTrapStatus=npThermoTrapStatus, npPwrChannelN=npPwrChannelN, npRelayMode=npRelayMode, npThermoHigh=npThermoHigh, npGsm=npGsm, npReboot=npReboot, npBatteryPok=npBatteryPok, lightcom=lightcom, npGsmTraps=npGsmTraps, npThermoMemo=npThermoMemo, npBatteryChg=npBatteryChg, npRelayState=npRelayState, npRelHumTrap=npRelHumTrap, npThermoTrapMemo=npThermoTrapMemo, npBatteryInfo=npBatteryInfo, npGsmRegistration=npGsmRegistration, npGsmInfo=npGsmInfo, npRelHumSensorStatusH=npRelHumSensorStatusH, npThermoTable=npThermoTable, npRelayFlip=npRelayFlip, npRelay=npRelay, npRelHumSensorValueT=npRelHumSensorValueT, npRelHumSensorValueH=npRelHumSensorValueH, npRelHumSafeRangeHigh=npRelHumSafeRangeHigh, npRelayPowered=npRelayPowered, npPwrStartReset=npPwrStartReset, npPwrMemo=npPwrMemo, npRelHumSensor=npRelHumSensor, npThermoEntry=npThermoEntry, npRelayEntry=npRelayEntry, npRelHumSensorStatus=npRelHumSensorStatus, npThermoTrapValue=npThermoTrapValue, npGsmFailed=npGsmFailed, npGsmTrapPrefix=npGsmTrapPrefix, npRelayN=npRelayN, npGsmTrap=npGsmTrap, npPwrEntry=npPwrEntry, npRelayMemo=npRelayMemo, npThermoValue=npThermoValue, npRelHumTrapPrefix=npRelHumTrapPrefix)
| 119.484375 | 2,113 | 0.768885 | 2,752 | 22,941 | 6.408067 | 0.147529 | 0.071449 | 0.125035 | 0.015424 | 0.51704 | 0.375447 | 0.332124 | 0.298044 | 0.276609 | 0.229033 | 0 | 0.078451 | 0.08487 | 22,941 | 191 | 2,114 | 120.109948 | 0.761551 | 0.014123 | 0 | 0 | 0 | 0.043716 | 0.21802 | 0.007166 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.038251 | 0 | 0.038251 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ea8ba56015942cb5fae3b931b7a1f2e85fe8b327 | 856 | py | Python | test.py | SylvainStak/BrainFuck-Compiler | 8715a05b7bb9a8b82d574bdc4c8d2b4e9f19efc7 | [
"MIT"
] | null | null | null | test.py | SylvainStak/BrainFuck-Compiler | 8715a05b7bb9a8b82d574bdc4c8d2b4e9f19efc7 | [
"MIT"
] | null | null | null | test.py | SylvainStak/BrainFuck-Compiler | 8715a05b7bb9a8b82d574bdc4c8d2b4e9f19efc7 | [
"MIT"
] | null | null | null | import unittest
from BrainFuck import BrainFuck
class Test(unittest.TestCase):
    """End-to-end tests for the BrainFuck interpreter's run() output."""

    def tests(self):
        """Run every (source program, expected output) pair.

        Fixed: the original loop stopped at the first failing program and
        reported failures as opaque ``i[0]``/``i[1]`` values; each case now
        runs in its own ``subTest`` so all failures are reported with the
        offending program attached.  The long ``+`` runs are built with
        ``'+' * n`` (identical strings) so the ASCII target is explicit:
        cell values map to character codes ('A'=65, 'H'=72, '5'=53, '0'=48).
        """
        # given input - expected output
        codeTestCases = [
            ['#', ''],                                # unknown char: no output
            ['+' * 65 + '.', 'A'],
            ['+' * 65 + '..', 'AA'],
            ['+' * 65 + '.+.+.', 'ABC'],
            ['+' * 72 + '.+.', 'HI'],
            ['+' * 53 + '.-.-.-.-.', '54321'],
            ['++++++>' + '+' * 48 + '.', '0'],        # output from second cell
            ['>++++++<' + '+' * 48 + '.', '0'],       # pointer moves both ways
        ]
        for code, expected in codeTestCases:
            with self.subTest(code=code):
                self.assertEqual(BrainFuck(code).run(), expected)


if __name__ == '__main__':
    unittest.main()
| 35.666667 | 88 | 0.251168 | 44 | 856 | 4.704545 | 0.704545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011719 | 0.102804 | 856 | 23 | 89 | 37.217391 | 0.257813 | 0.033879 | 0 | 0 | 0 | 0 | 0.578182 | 0.549091 | 0 | 0 | 0 | 0 | 0.052632 | 1 | 0.052632 | false | 0 | 0.105263 | 0 | 0.210526 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
ea98e0c5b7953fe992dacefcfca390782a22d59b | 431 | py | Python | portable_spreadsheet/__init__.py | david-salac/Portable-spreadsheet-generator | ca31b1e77f26b77ab2ca4d328b12e3cf14c8a029 | [
"MIT"
] | 28 | 2020-05-17T18:42:07.000Z | 2021-06-01T14:58:22.000Z | portable_spreadsheet/__init__.py | david-salac/Portable-spreadsheet-generator | ca31b1e77f26b77ab2ca4d328b12e3cf14c8a029 | [
"MIT"
] | 3 | 2020-08-19T21:28:17.000Z | 2021-01-10T20:00:56.000Z | portable_spreadsheet/__init__.py | david-salac/Portable-spreadsheet-generator | ca31b1e77f26b77ab2ca4d328b12e3cf14c8a029 | [
"MIT"
] | 4 | 2020-10-10T12:15:40.000Z | 2021-11-08T02:08:34.000Z | from .work_book import WorkBook, ExcelParameters, DictionaryParameters, ListParameters # noqa
from .sheet import Sheet # noqa
from .cell import Cell # noqa
from .cell_indices import CellIndices # noqa
from .cell_slice import CellSlice # noqa
from .grammars import GRAMMARS # noqa
from .grammar_utils import GrammarUtils # noqa
from .skipped_label import SkippedLabel # noqa
__version__ = "2.2.1"
__status__ = "Production"
| 35.916667 | 94 | 0.784223 | 54 | 431 | 6.018519 | 0.5 | 0.172308 | 0.110769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008219 | 0.153132 | 431 | 11 | 95 | 39.181818 | 0.882192 | 0.090487 | 0 | 0 | 0 | 0 | 0.039164 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.8 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
eaa2882da35520840b85025e87f4eb3971d72675 | 1,534 | py | Python | athenet/algorithm/derest/layers/dropout.py | heurezjusz/Athena | 0bdda97b0e06dbb3c1699d4ed7875e4adc96d580 | [
"BSD-2-Clause"
] | 2 | 2016-02-02T12:59:39.000Z | 2018-03-29T17:17:11.000Z | athenet/algorithm/derest/layers/dropout.py | heurezjusz/Athenet | 0bdda97b0e06dbb3c1699d4ed7875e4adc96d580 | [
"BSD-2-Clause"
] | 5 | 2016-01-10T23:23:57.000Z | 2016-03-26T16:29:42.000Z | athenet/algorithm/derest/layers/dropout.py | heurezjusz/Athena | 0bdda97b0e06dbb3c1699d4ed7875e4adc96d580 | [
"BSD-2-Clause"
] | 1 | 2020-02-26T20:19:17.000Z | 2020-02-26T20:19:17.000Z | from athenet.algorithm.derest.layers import DerestLayer
from athenet.algorithm.numlike import assert_numlike
class DerestDropoutLayer(DerestLayer):
    # Derest (interval-estimation) wrapper for a dropout layer.  Both the
    # forward activation estimate and the backward derivative estimate are
    # scaled by the keep probability (1 - self.layer.p_dropout) via the
    # module-level helpers a_dropout / d_dropout.

    def _count_activation(self, layer_input):
        """
        Returns estimated activations

        :param Numlike layer_input: estimated input of this layer
        :return Numlike:
        """
        return a_dropout(layer_input, self.layer.p_dropout)

    def _count_derivatives(self, layer_output, input_shape):
        """
        Returns estimated impact of input of layer on output of network

        :param Numlike layer_output: estimated impact of this layer's output
        :param tuple input_shape: shape of the layer input (unused here, kept
            for interface uniformity across Derest layers)
        :return Numlike:
        """
        return d_dropout(layer_output, self.layer.p_dropout)
def a_dropout(layer_input, p_dropout):
    """Estimate the activation of a dropout layer.

    In expectation, dropout scales its input by the keep probability.

    :param Numlike layer_input: input Numlike
    :param float p_dropout: probability of dropping in dropout
    :rtype: Numlike
    """
    assert_numlike(layer_input)
    keep_probability = 1.0 - p_dropout
    return layer_input * keep_probability
def d_dropout(output, p_dropout):
    """Estimate the impact of a dropout layer's input on the network output.

    :param Numlike output: estimated impact of output of layer on output
                           of network in shape (batch_size, number of channels,
                           height, width)
    :param float p_dropout: probability of dropping in dropout
    :returns: Estimated impact of input on output of network
    :rtype: Numlike
    """
    assert_numlike(output)
    keep_probability = 1.0 - p_dropout
    return output * keep_probability
| 30.68 | 79 | 0.679922 | 189 | 1,534 | 5.349206 | 0.248677 | 0.063304 | 0.06726 | 0.06726 | 0.322453 | 0.282888 | 0.162216 | 0.094955 | 0.094955 | 0 | 0 | 0.003487 | 0.252282 | 1,534 | 49 | 80 | 31.306122 | 0.877942 | 0.499348 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.230769 | 1 | 0.307692 | false | 0 | 0.153846 | 0 | 0.846154 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
57782f41da68f3ba776bdc8d5b57ef5139a66c05 | 1,035 | py | Python | gora/models/course.py | OneTesseractInMultiverse/G-ra | 1b91764620ba7a6400c64aa423f508837e71d226 | [
"MIT"
] | null | null | null | gora/models/course.py | OneTesseractInMultiverse/G-ra | 1b91764620ba7a6400c64aa423f508837e71d226 | [
"MIT"
] | null | null | null | gora/models/course.py | OneTesseractInMultiverse/G-ra | 1b91764620ba7a6400c64aa423f508837e71d226 | [
"MIT"
] | null | null | null | import datetime
import uuid
import nacl.pwhash
from mongoengine import *
from werkzeug.security import safe_str_cmp
from nacl.pwhash import verify_scryptsalsa208sha256
from gora import app
from gora.extensions.security.crypto.entropy import gen_salt
from gora.extensions.security.crypto.message_integrity import compute_hash
# ------------------------------------------------------------------------------
# CLASS COURSE
# ------------------------------------------------------------------------------
class Course(Document):
    """
    Represents a course that is being given in the determined educational
    institution.

    Inherits from mongoengine's ``Document`` so the class-level ``*Field``
    declarations below are registered by the ODM metaclass; on a plain
    class the field descriptors are inert and instances cannot be
    persisted or queried.
    """

    # Stable external identifier for the course.
    course_id = StringField(max_length=40, required=True)
    # Human-readable course title.
    title = StringField(max_length=256, required=True)
    # Identifier of the user coordinating the course.
    coordinator_id = StringField(max_length=40, required=True)
    # Scheduled start and end of the course.
    starts = DateTimeField(required=True)
    ends = DateTimeField(required=True)
    # Free-form academic level / difficulty label.
    level = StringField(max_length=120, required=True)
    description = StringField(max_length=512, required=True)
| 27.236842 | 80 | 0.633816 | 107 | 1,035 | 6.009346 | 0.514019 | 0.130638 | 0.155521 | 0.080871 | 0.211509 | 0.111975 | 0.111975 | 0 | 0 | 0 | 0 | 0.021764 | 0.156522 | 1,035 | 37 | 81 | 27.972973 | 0.714777 | 0.246377 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.529412 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
578e41e68d8c3ec784d0bc841b62a20ad1e57a11 | 7,779 | py | Python | api/services/email_templates.py | Meeqan/trisixty-buys-API | b747ec92c6d6af329fef5781ecd6f1433a5b5393 | [
"MIT"
] | null | null | null | api/services/email_templates.py | Meeqan/trisixty-buys-API | b747ec92c6d6af329fef5781ecd6f1433a5b5393 | [
"MIT"
] | null | null | null | api/services/email_templates.py | Meeqan/trisixty-buys-API | b747ec92c6d6af329fef5781ecd6f1433a5b5393 | [
"MIT"
] | null | null | null | """Email templates"""
verification = \
"""
<html>
<head></head>
<body>
<p>Hi!,<br>
Thanks for using 360buys! Please confirm your email address by clicking
on the link below. We'll communicate with you from time to time via email
so it's important that we have an up-to-date email address on file.<br>
<a href="{}">{}</a>.
</p>
<p>
If you did not sign up for a 360buys account
please disregard this email.<br>
Happy shopping! <br>
From 360Buys.
</p>
</body>
</html>
"""
style = """
<style type="text/css">
img {
max-width: 100%;
}
body {
-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; width: 100% !important; height: 100%; line-height: 1.6em;
}
body {
background-color: #f6f6f6;
}
@media only screen and (max-width: 640px) {
body {
padding: 0 !important;
}
h1 {
font-weight: 800 !important; margin: 20px 0 5px !important;
}
h2 {
font-weight: 800 !important; margin: 20px 0 5px !important;
}
h3 {
font-weight: 800 !important; margin: 20px 0 5px !important;
}
h4 {
font-weight: 800 !important; margin: 20px 0 5px !important;
}
h1 {
font-size: 22px !important;
}
h2 {
font-size: 18px !important;
}
h3 {
font-size: 16px !important;
}
.container {
padding: 0 !important; width: 100% !important;
}
.content {
padding: 0 !important;
}
.content-wrap {
padding: 10px !important;
}
.invoice {
width: 100% !important;
}
}
</style>
"""
email_verification = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;">
<head>
<meta name="viewport" content="width=device-width" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>{2}</title>
{0}
</head>
<body itemscope itemtype="http://schema.org/EmailMessage" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; width: 100% !important; height: 100%; line-height: 1.6em; background-color: #f6f6f6; margin: 0;" bgcolor="#f6f6f6">
<table class="body-wrap" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; width: 100%; background-color: #f6f6f6; margin: 0;" bgcolor="#f6f6f6"><tr style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><td style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0;" valign="top"></td>
<td class="container" width="600" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; display: block !important; max-width: 600px !important; clear: both !important; margin: 0 auto;" valign="top">
<div class="content" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; max-width: 600px; display: block; margin: 0 auto; padding: 20px;">
<table class="main" width="100%" cellpadding="0" cellspacing="0" itemprop="action" itemscope itemtype="http://schema.org/ConfirmAction" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; border-radius: 3px; background-color: #fff; margin: 0; border: 1px solid #e9e9e9;" bgcolor="#fff"><tr style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><td class="content-wrap" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0; padding: 20px;" valign="top">
<meta itemprop="name" content="Confirm Email" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;" /><table width="100%" cellpadding="0" cellspacing="0" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><tr style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><td class="content-block" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0; padding: 0 0 20px;" valign="top">
Please confirm your email address by clicking the link below.
</td>
</tr><tr style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><td class="content-block" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0; padding: 0 0 20px;" valign="top">
We may need to send you critical information about our service and it is important that we have an accurate email address.
</td>
</tr><tr style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><td class="content-block" itemprop="handler" itemscope itemtype="http://schema.org/HttpActionHandler" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0; padding: 0 0 20px;" valign="top">
<a href="{1}" class="btn-primary" itemprop="url" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; color: #FFF; text-decoration: none; line-height: 2em; font-weight: bold; text-align: center; cursor: pointer; display: inline-block; border-radius: 5px; text-transform: capitalize; background-color: #348eda; margin: 0; border-color: #348eda; border-style: solid; border-width: 10px 20px;">{2}</a>
</td>
</tr><tr style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><td class="content-block" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0; padding: 0 0 20px;" valign="top">
— 360Buys
</td>
</tr></table></td>
</tr></table><div class="footer" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; width: 100%; clear: both; color: #999; margin: 0; padding: 20px;">
<table width="100%" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><tr style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; margin: 0;"><td class="aligncenter content-block" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 12px; vertical-align: top; color: #999; text-align: center; margin: 0; padding: 0 0 20px;" align="center" valign="top">Follow <a href="http://twitter.com/360buys" style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 12px; color: #999; text-decoration: underline; margin: 0;">@360buys</a> on Twitter.</td>
</tr></table></div></div>
</td>
<td style="font-family: 'Helvetica Neue',Helvetica,Arial,sans-serif; box-sizing: border-box; font-size: 14px; vertical-align: top; margin: 0;" valign="top"></td>
</tr></table></body>
</html>
"""
| 68.236842 | 754 | 0.682478 | 1,101 | 7,779 | 4.821072 | 0.181653 | 0.045215 | 0.0763 | 0.12208 | 0.652788 | 0.62792 | 0.612095 | 0.581952 | 0.581952 | 0.548041 | 0 | 0.042732 | 0.145649 | 7,779 | 113 | 755 | 68.840708 | 0.755943 | 0.001928 | 0 | 0.275862 | 0 | 0.206897 | 0.990363 | 0.158799 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.183908 | 0 | 0.183908 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5791926e4269eb5db472dc898e1bfd8300dfc175 | 1,001 | py | Python | platypush/backend/sensor/mcp3008/__init__.py | BlackLight/platypush | 6c0a8bf2599eb4ab41a6122dbd988075d8b1a63a | [
"MIT"
] | 228 | 2018-01-30T11:17:09.000Z | 2022-03-24T11:22:26.000Z | platypush/backend/sensor/mcp3008/__init__.py | BlackLight/platypush | 6c0a8bf2599eb4ab41a6122dbd988075d8b1a63a | [
"MIT"
] | 167 | 2017-12-11T19:35:38.000Z | 2022-03-27T14:45:30.000Z | platypush/backend/sensor/mcp3008/__init__.py | BlackLight/runbullet | 8d26c8634d2677b4402f0a21b9ab8244b44640db | [
"MIT"
] | 16 | 2018-05-03T07:31:56.000Z | 2021-12-05T19:27:37.000Z | from platypush.backend.sensor import SensorBackend
class SensorMcp3008Backend(SensorBackend):
    """
    Polls analog sensor values from an MCP3008 chipset
    (https://learn.adafruit.com/raspberry-pi-analog-to-digital-converters/mcp3008)

    Requires:

        * ``adafruit-mcp3008`` (``pip install adafruit-mcp3008``)
        * The :mod:`platypush.plugins.gpio.sensor.mcp3008` plugin configured

    Triggers:

        * :class:`platypush.message.event.sensor.SensorDataChangeEvent` if the measurements of a sensor have changed
        * :class:`platypush.message.event.sensor.SensorDataAboveThresholdEvent` if the measurements of a sensor have
            gone above a configured threshold
        * :class:`platypush.message.event.sensor.SensorDataBelowThresholdEvent` if the measurements of a sensor have
            gone below a configured threshold
    """

    def __init__(self, **kwargs):
        # All polling/eventing logic lives in the generic SensorBackend;
        # this subclass only binds it to the MCP3008 GPIO plugin.
        plugin_name = 'gpio.sensor.mcp3008'
        super().__init__(plugin=plugin_name, **kwargs)
# vim:sw=4:ts=4:et:
| 34.517241 | 116 | 0.712288 | 114 | 1,001 | 6.184211 | 0.508772 | 0.059574 | 0.089362 | 0.110638 | 0.275177 | 0.139007 | 0.139007 | 0.096454 | 0 | 0 | 0 | 0.03681 | 0.185814 | 1,001 | 28 | 117 | 35.75 | 0.828221 | 0.739261 | 0 | 0 | 0 | 0 | 0.093137 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.25 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
579f8a8f7cf3b87f57b226ab55b4b788723a06ab | 637 | py | Python | tests.py | thehoodieguy/godaddy-ip-sync | 78f03f54e423b5ec244dac5d58300d7d249c77cf | [
"MIT"
] | null | null | null | tests.py | thehoodieguy/godaddy-ip-sync | 78f03f54e423b5ec244dac5d58300d7d249c77cf | [
"MIT"
] | null | null | null | tests.py | thehoodieguy/godaddy-ip-sync | 78f03f54e423b5ec244dac5d58300d7d249c77cf | [
"MIT"
] | null | null | null | import os
from pprint import pprint
import pytest
import requests
from services import GoDaddyService, IFConfigService
from urls import GoDaddyUrls, IFCONFIG_HOST
TEST_DOMAIN = os.environ["TEST_DOMAIN"]
TEST_API_KEY = os.environ["TEST_API_KEY"]
TEST_API_SECRET = os.environ["TEST_API_SECRET"]
go_daddy_service = GoDaddyService(TEST_API_KEY, TEST_API_SECRET)
ifconfig_service = IFConfigService()
def test_get_my_dns_records():
go_daddy_service.get_current_domain_ip(TEST_DOMAIN)
def test_get_my_ip():
ifconfig_service.get_my_ip()
def test_set_domain_ip():
go_daddy_service.set_domain_ip(TEST_DOMAIN, "192.168.0.5")
| 21.965517 | 64 | 0.811617 | 98 | 637 | 4.846939 | 0.346939 | 0.088421 | 0.082105 | 0.067368 | 0.096842 | 0.096842 | 0 | 0 | 0 | 0 | 0 | 0.014085 | 0.10832 | 637 | 28 | 65 | 22.75 | 0.822183 | 0 | 0 | 0 | 0 | 0 | 0.077044 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0 | 0.352941 | 0 | 0.529412 | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
57a6226cb01e6ffdd6feb38923eb9b5869b647ee | 227 | py | Python | tests/test_rbm.py | michaelhabeck/paths | c873f0ed80ca921e3c2ade7c8518af6d445bef36 | [
"MIT"
] | 5 | 2017-11-30T08:57:56.000Z | 2021-06-23T19:10:11.000Z | tests/test_rbm.py | michaelhabeck/paths | c873f0ed80ca921e3c2ade7c8518af6d445bef36 | [
"MIT"
] | null | null | null | tests/test_rbm.py | michaelhabeck/paths | c873f0ed80ca921e3c2ade7c8518af6d445bef36 | [
"MIT"
] | 1 | 2017-11-30T08:57:57.000Z | 2017-11-30T08:57:57.000Z | import os
import numpy as np
import paths as pth
# Load pre-trained RBM parameters (biases 'a', 'b' and weight matrix 'W').
params = np.load('./data/mnistvh_CD25.npz')
rbm = pth.RBM(params['a'], params['b'], params['W'])
# Draw a sample at inverse temperature beta=0.
x = rbm.sample(beta=0.)
# Compare the two energy implementations on the same sample.  The original
# bare ``print a, b`` statement is Python-2-only (SyntaxError on Python 3);
# this form produces identical "a b" output on both Python 2 and 3.
print('{0} {1}'.format(rbm.energy(x), rbm.energy_py(x)))
| 18.916667 | 52 | 0.629956 | 39 | 227 | 3.615385 | 0.615385 | 0.056738 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016304 | 0.189427 | 227 | 11 | 53 | 20.636364 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0.114537 | 0.101322 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.428571 | null | null | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
57b83d87ff2424e950aa5da9f8c4ca71226a9385 | 63 | py | Python | projectflow/forloops.py | Th3RedMan/python-course | fa5e73903d319aeca89af91aa41a047c5158993c | [
"MIT"
] | null | null | null | projectflow/forloops.py | Th3RedMan/python-course | fa5e73903d319aeca89af91aa41a047c5158993c | [
"MIT"
] | null | null | null | projectflow/forloops.py | Th3RedMan/python-course | fa5e73903d319aeca89af91aa41a047c5158993c | [
"MIT"
] | null | null | null | parrot = "Norwegian Blue"
for char in parrot:
print(char)
| 12.6 | 25 | 0.68254 | 9 | 63 | 4.777778 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.222222 | 63 | 4 | 26 | 15.75 | 0.877551 | 0 | 0 | 0 | 0 | 0 | 0.222222 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
57d4db3bc9ec863de7fed53d6c661fca687f5dc0 | 8,400 | py | Python | custom_components/smartthinq_sensors/wideq/dryer_states.py | Pistak/ha-smartthinq-sensors | 9ce33603c35639dd726d93dffb962aff2a28118e | [
"Apache-2.0"
] | null | null | null | custom_components/smartthinq_sensors/wideq/dryer_states.py | Pistak/ha-smartthinq-sensors | 9ce33603c35639dd726d93dffb962aff2a28118e | [
"Apache-2.0"
] | null | null | null | custom_components/smartthinq_sensors/wideq/dryer_states.py | Pistak/ha-smartthinq-sensors | 9ce33603c35639dd726d93dffb962aff2a28118e | [
"Apache-2.0"
] | null | null | null | import enum
from .device import STATE_OPTIONITEM_NONE
# Dryer State
class STATE_DRYER(enum.Enum):
    """Run states reported by the dryer, mapped to display strings."""

    POWER_OFF = STATE_OPTIONITEM_NONE
    COOLING = "Cooling"
    DRYING = "Drying"
    END = "End"
    ERROR = "Error"
    ERRORSTATE = "An error occurred"
    INITIAL = "Select Course"
    PAUSE = "Paused"
    # NOTE: same value as DRYING above, so enum aliasing makes
    # STATE_DRYER.RUNNING the identical member to STATE_DRYER.DRYING.
    RUNNING = "Drying"
    SMART_DIAGNOSIS = "Smart Diagnosis"
    WRINKLE_CARE = "Wrinkle Care"
# Dryer Level
class STATE_DRYER_DRYLEVEL(enum.Enum):
    """Selectable dryness levels and timed-dry settings, as display strings."""

    OFF = STATE_OPTIONITEM_NONE
    NO_SELECT = "Not selected"
    COOLING = "Cooling"
    CUPBOARD = "Cupboard"
    DAMP = "Damp"
    ECO = "Eco"
    EXTRA = "Extra"
    ENERGY = "Energy"
    IRON = "Iron"
    LESS = "Less"
    LOW = "Low"
    MORE = "More"
    NORMAL = "Normal"
    SPEED = "Speed"
    VERY = "Very"
    # Timed-dry durations.
    TIME_30 = "30 min"
    TIME_60 = "60 min"
    TIME_90 = "90 min"
    TIME_120 = "120 min"
    TIME_150 = "150 min"
# Dryer Temp
class STATE_DRYER_TEMP(enum.Enum):
    """Selectable drying temperatures, as display strings."""

    OFF = STATE_OPTIONITEM_NONE
    NO_SELECT = "Not selected"
    HIGH = "High"
    LOW = "Low"
    MEDIUM = "Medium"
    MID_HIGH = "Medium High"
    ULTRA_LOW = "Ultra Low"
# Dryer Error
class STATE_DRYER_ERROR(enum.Enum):
    """Dryer error conditions, mapped to user-facing messages."""

    OFF = STATE_OPTIONITEM_NONE
    NO_ERROR = "Normal"
    ERROR_AE = "AE - Contact Service Center"
    ERROR_CE1 = "CE1 - Contact Service Center"
    ERROR_DE = "Door open - Please close the door"
    ERROR_DE4 = "DE4 - Contact Service Center"
    ERROR_EMPTYWATER = "Error Empty Water"
    ERROR_F1 = "F1 - Contact Service Center"
    ERROR_LE1 = "LE1 - Contact Service Center"
    ERROR_LE2 = "LE2 - Contact Service Center"
    ERROR_NOFILTER = "Error No Filter"
    ERROR_NP = "NP - Contact Service Center"
    ERROR_OE = "Drain error - Please make sure the pipe is not clogged/frozen"
    ERROR_PS = "PS - Contact Service Center"
    ERROR_TE1 = "TE1 - Contact Service Center"
    ERROR_TE2 = "TE2 - Contact Service Center"
    ERROR_TE5 = "TE5 - Contact Service Center"
    ERROR_TE6 = "TE6 - Contact Service Center"
"""------------------for Dryers"""
DRYERSTATES = {
"ERROR": STATE_DRYER.ERROR,
"@WM_STATE_COOLING_W": STATE_DRYER.COOLING,
"@WM_STATE_END_W": STATE_DRYER.END,
"@WM_STATE_ERROR_W": STATE_DRYER.ERRORSTATE,
"@WM_STATE_DRYING_W": STATE_DRYER.DRYING,
"@WM_STATE_INITIAL_W": STATE_DRYER.INITIAL,
"@WM_STATE_POWER_OFF_W": STATE_DRYER.POWER_OFF,
"@WM_STATE_PAUSE_W": STATE_DRYER.PAUSE,
"@WM_STATE_RUNNING_W": STATE_DRYER.RUNNING,
"@WM_STATE_SMART_DIAGNOSIS_W": STATE_DRYER.SMART_DIAGNOSIS,
"@WM_STATE_WRINKLECARE_W": STATE_DRYER.WRINKLE_CARE,
}
# Maps device-reported dry-level keys to STATE_DRYER_DRYLEVEL members.
# Separate key families ("@WM_OPTION_*", "@WM_DRY24_*", "@WM_DRY27_*",
# "@WM_TITAN2_*", "@WM_FL24_TITAN_*") cover different device models /
# language packs; "-" and "OFF" are raw fallback keys.
DRYERDRYLEVELS = {
    "-": STATE_DRYER_DRYLEVEL.OFF,
    "OFF": STATE_DRYER_DRYLEVEL.OFF,
    "NOT_SELECTED": STATE_DRYER_DRYLEVEL.NO_SELECT,
    "@WM_TERM_NO_SELECT_W": STATE_DRYER_DRYLEVEL.NO_SELECT,
    "@WM_OPTION_DRY_DAMP_DRY_W": STATE_DRYER_DRYLEVEL.DAMP,
    "@WM_OPTION_DRY_LESS_DRY_W": STATE_DRYER_DRYLEVEL.LESS,
    "@WM_OPTION_DRY_MORE_DRY_W": STATE_DRYER_DRYLEVEL.MORE,
    "@WM_OPTION_DRY_NORMAL_W": STATE_DRYER_DRYLEVEL.NORMAL,
    "@WM_OPTION_DRY_VERY_DRY_W": STATE_DRYER_DRYLEVEL.VERY,
    "@WM_DRY24_DRY_LEVEL_CUPBOARD_W": STATE_DRYER_DRYLEVEL.CUPBOARD,
    "@WM_DRY24_DRY_LEVEL_DAMP_W": STATE_DRYER_DRYLEVEL.DAMP,
    "@WM_DRY24_DRY_LEVEL_EXTRA_W": STATE_DRYER_DRYLEVEL.EXTRA,
    "@WM_DRY24_DRY_LEVEL_IRON_W": STATE_DRYER_DRYLEVEL.IRON,
    "@WM_DRY24_DRY_LEVEL_LESS_W": STATE_DRYER_DRYLEVEL.LESS,
    "@WM_DRY24_DRY_LEVEL_MORE_W": STATE_DRYER_DRYLEVEL.MORE,
    "@WM_DRY24_DRY_LEVEL_NORMAL_W": STATE_DRYER_DRYLEVEL.NORMAL,
    "@WM_DRY24_DRY_LEVEL_VERY_W": STATE_DRYER_DRYLEVEL.VERY,
    "@WM_DRY27_DRY_LEVEL_CUPBOARD_W": STATE_DRYER_DRYLEVEL.CUPBOARD,
    "@WM_DRY27_DRY_LEVEL_DAMP_W": STATE_DRYER_DRYLEVEL.DAMP,
    "@WM_DRY27_DRY_LEVEL_EXTRA_W": STATE_DRYER_DRYLEVEL.EXTRA,
    "@WM_DRY27_DRY_LEVEL_IRON_W": STATE_DRYER_DRYLEVEL.IRON,
    "@WM_DRY27_DRY_LEVEL_LESS_W": STATE_DRYER_DRYLEVEL.LESS,
    "@WM_DRY27_DRY_LEVEL_MORE_W": STATE_DRYER_DRYLEVEL.MORE,
    "@WM_DRY27_DRY_LEVEL_NORMAL_W": STATE_DRYER_DRYLEVEL.NORMAL,
    "@WM_DRY27_DRY_LEVEL_VERY_W": STATE_DRYER_DRYLEVEL.VERY,
    "@WM_TITAN2_OPTION_DRY_NORMAL_W": STATE_DRYER_DRYLEVEL.NORMAL,
    "@WM_TITAN2_OPTION_DRY_ECO_W": STATE_DRYER_DRYLEVEL.ECO,
    "@WM_TITAN2_OPTION_DRY_VERY_W": STATE_DRYER_DRYLEVEL.VERY,
    "@WM_TITAN2_OPTION_DRY_IRON_W": STATE_DRYER_DRYLEVEL.IRON,
    "@WM_TITAN2_OPTION_DRY_LOW_W": STATE_DRYER_DRYLEVEL.LOW,
    "@WM_TITAN2_OPTION_DRY_ENERGY_W": STATE_DRYER_DRYLEVEL.ENERGY,
    "@WM_TITAN2_OPTION_DRY_SPEED_W": STATE_DRYER_DRYLEVEL.SPEED,
    "@WM_TITAN2_OPTION_DRY_COOLING_W": STATE_DRYER_DRYLEVEL.COOLING,
    "@WM_TITAN2_OPTION_DRY_30_W": STATE_DRYER_DRYLEVEL.TIME_30,
    "@WM_TITAN2_OPTION_DRY_60_W": STATE_DRYER_DRYLEVEL.TIME_60,
    "@WM_TITAN2_OPTION_DRY_90_W": STATE_DRYER_DRYLEVEL.TIME_90,
    "@WM_TITAN2_OPTION_DRY_120_W": STATE_DRYER_DRYLEVEL.TIME_120,
    "@WM_TITAN2_OPTION_DRY_150_W": STATE_DRYER_DRYLEVEL.TIME_150,
    "@WM_FL24_TITAN_DRY_NORMAL_W": STATE_DRYER_DRYLEVEL.NORMAL,
    "@WM_FL24_TITAN_DRY_ECO_W": STATE_DRYER_DRYLEVEL.ECO,
    "@WM_FL24_TITAN_DRY_VERY_W": STATE_DRYER_DRYLEVEL.VERY,
    "@WM_FL24_TITAN_DRY_IRON_W": STATE_DRYER_DRYLEVEL.IRON,
    "@WM_FL24_TITAN_DRY_LOW_W": STATE_DRYER_DRYLEVEL.LOW,
    "@WM_FL24_TITAN_DRY_ENERGY_W": STATE_DRYER_DRYLEVEL.ENERGY,
    "@WM_FL24_TITAN_DRY_SPEED_W": STATE_DRYER_DRYLEVEL.SPEED,
    "@WM_FL24_TITAN_DRY_COOLING_W": STATE_DRYER_DRYLEVEL.COOLING,
    "@WM_FL24_TITAN_DRY_30_W": STATE_DRYER_DRYLEVEL.TIME_30,
    "@WM_FL24_TITAN_DRY_60_W": STATE_DRYER_DRYLEVEL.TIME_60,
    "@WM_FL24_TITAN_DRY_90_W": STATE_DRYER_DRYLEVEL.TIME_90,
    "@WM_FL24_TITAN_DRY_120_W": STATE_DRYER_DRYLEVEL.TIME_120,
    "@WM_FL24_TITAN_DRY_150_W": STATE_DRYER_DRYLEVEL.TIME_150,
}
# Maps device-reported temperature keys to STATE_DRYER_TEMP members.
# "-" and "OFF" are raw fallback keys; the "@WM_*" keys come from the
# device language pack.
DRYERTEMPS = {
    "-": STATE_DRYER_TEMP.OFF,
    "OFF": STATE_DRYER_TEMP.OFF,
    "@WM_TERM_NO_SELECT_W": STATE_DRYER_TEMP.NO_SELECT,
    "@WM_OPTION_TEMP_HIGH_W": STATE_DRYER_TEMP.HIGH,
    "@WM_OPTION_TEMP_LOW_W": STATE_DRYER_TEMP.LOW,
    "@WM_OPTION_TEMP_MEDIUM_W": STATE_DRYER_TEMP.MEDIUM,
    "@WM_OPTION_TEMP_MEDIUM_HIGH_W": STATE_DRYER_TEMP.MID_HIGH,
    "@WM_OPTION_TEMP_ULTRA_LOW_W": STATE_DRYER_TEMP.ULTRA_LOW,
    "@WM_DRY27_TEMP_HIGH_W": STATE_DRYER_TEMP.HIGH,
    "@WM_DRY27_TEMP_LOW_W": STATE_DRYER_TEMP.LOW,
    "@WM_DRY27_TEMP_MEDIUM_W": STATE_DRYER_TEMP.MEDIUM,
    "@WM_DRY27_TEMP_MID_HIGH_W": STATE_DRYER_TEMP.MID_HIGH,
    "@WM_DRY27_TEMP_ULTRA_LOW_W": STATE_DRYER_TEMP.ULTRA_LOW,
}
# Maps human-readable reference error strings to STATE_DRYER_ERROR members.
DRYERREFERRORS = {
    "OFF": STATE_DRYER_ERROR.OFF,
    "No Error": STATE_DRYER_ERROR.NO_ERROR,
    "AE Error": STATE_DRYER_ERROR.ERROR_AE,
    "CE1 Error": STATE_DRYER_ERROR.ERROR_CE1,
    "DE Error": STATE_DRYER_ERROR.ERROR_DE,
    "DE4 Error": STATE_DRYER_ERROR.ERROR_DE4,
    "EMPTYWATER Error": STATE_DRYER_ERROR.ERROR_EMPTYWATER,
    "F1 Error": STATE_DRYER_ERROR.ERROR_F1,
    "LE1 Error": STATE_DRYER_ERROR.ERROR_LE1,
    "LE2 Error": STATE_DRYER_ERROR.ERROR_LE2,
    "NOFILTER Error": STATE_DRYER_ERROR.ERROR_NOFILTER,
    "NP Error": STATE_DRYER_ERROR.ERROR_NP,
    "OE Error": STATE_DRYER_ERROR.ERROR_OE,
    "PS Error": STATE_DRYER_ERROR.ERROR_PS,
    "TE1 Error": STATE_DRYER_ERROR.ERROR_TE1,
    "TE2 Error": STATE_DRYER_ERROR.ERROR_TE2,
    "TE5 Error": STATE_DRYER_ERROR.ERROR_TE5,
    "TE6 Error": STATE_DRYER_ERROR.ERROR_TE6,
}
# NOTE: per the original author this mapping is not used (presumably
# superseded by DRYERREFERRORS above -- verify against callers).  It maps
# language-pack error keys to STATE_DRYER_ERROR members; kept for
# reference.
DRYERERRORS = {
    "OFF": STATE_DRYER_ERROR.OFF,
    "ERROR_NOERROR": STATE_DRYER_ERROR.NO_ERROR,
    "@WM_US_DRYER_ERROR_AE_W": STATE_DRYER_ERROR.ERROR_AE,
    "@WM_US_DRYER_ERROR_CE1_W": STATE_DRYER_ERROR.ERROR_CE1,
    "@WM_US_DRYER_ERROR_DE_W": STATE_DRYER_ERROR.ERROR_DE,
    "@WM_WW_FL_ERROR_DE4_W": STATE_DRYER_ERROR.ERROR_DE4,
    "@WM_US_DRYER_ERROR_EMPTYWATER_W": STATE_DRYER_ERROR.ERROR_EMPTYWATER,
    "@WM_US_DRYER_ERROR_F1_W": STATE_DRYER_ERROR.ERROR_F1,
    "@WM_US_DRYER_ERROR_LE1_W": STATE_DRYER_ERROR.ERROR_LE1,
    "@WM_US_DRYER_ERROR_LE2_W": STATE_DRYER_ERROR.ERROR_LE2,
    "@WM_US_DRYER_ERROR_NOFILTER_W": STATE_DRYER_ERROR.ERROR_NOFILTER,
    "@WM_US_DRYER_ERROR_NP_GAS_W": STATE_DRYER_ERROR.ERROR_NP,
    "@WM_US_DRYER_ERROR_PS_W": STATE_DRYER_ERROR.ERROR_PS,
    "@WM_US_DRYER_ERROR_OE_W": STATE_DRYER_ERROR.ERROR_OE,
    "@WM_US_DRYER_ERROR_TE1_W": STATE_DRYER_ERROR.ERROR_TE1,
    "@WM_US_DRYER_ERROR_TE2_W": STATE_DRYER_ERROR.ERROR_TE2,
    "@WM_US_DRYER_ERROR_TE5_W": STATE_DRYER_ERROR.ERROR_TE5,
    "@WM_US_DRYER_ERROR_TE6_W": STATE_DRYER_ERROR.ERROR_TE6,
}
| 40.776699 | 78 | 0.749524 | 1,282 | 8,400 | 4.355694 | 0.090484 | 0.205946 | 0.167443 | 0.163324 | 0.558023 | 0.389327 | 0.363539 | 0.348854 | 0.211676 | 0.033309 | 0 | 0.028854 | 0.145952 | 8,400 | 205 | 79 | 40.97561 | 0.749512 | 0.0075 | 0 | 0.060109 | 0 | 0 | 0.362782 | 0.231168 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.010929 | 0 | 0.338798 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
57e24fd1daf5d1efe6c7e844526161ccfbee5345 | 2,457 | py | Python | lib/layer_utils/layer_regularizers.py | BryanPlummer/phrase_detection | febe4d2e02a0467850cdf97fb3d3c3c5592be9a2 | [
"MIT"
] | 7 | 2019-11-15T13:16:55.000Z | 2021-11-10T18:19:58.000Z | lib/layer_utils/layer_regularizers.py | BryanPlummer/phrase_detection | febe4d2e02a0467850cdf97fb3d3c3c5592be9a2 | [
"MIT"
] | 1 | 2021-09-07T13:28:49.000Z | 2021-09-07T13:28:49.000Z | lib/layer_utils/layer_regularizers.py | BryanPlummer/phrase_detection | febe4d2e02a0467850cdf97fb3d3c3c5592be9a2 | [
"MIT"
] | null | null | null | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Regularizers for use with layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numbers
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import standard_ops
from tensorflow.python.platform import tf_logging as logging
def weight_l2_regularizer(initial_weights, scale, scope=None):
  """Returns a function applying L2 regularization to a weight *delta*.

  Unlike a plain L2 regularizer, the returned function penalizes the
  squared distance from ``initial_weights`` (``initial_weights - weights``),
  i.e. it discourages the weights from drifting away from their initial
  values.

  Small values of L2 can help prevent overfitting the training data.

  Args:
    initial_weights: Reference weights `Tensor`; the penalty is computed on
      `initial_weights - weights`.
    scale: A scalar multiplier `Tensor`. 0.0 disables the regularizer.
    scope: An optional scope name.

  Returns:
    A function with signature `l2(weights)` that applies L2 regularization.

  Raises:
    ValueError: If scale is negative or if scale is not a float.
  """
  if isinstance(scale, numbers.Integral):
    raise ValueError('scale cannot be an integer: %s' % (scale,))
  if isinstance(scale, numbers.Real):
    if scale < 0.:
      raise ValueError('Setting a scale less than 0 on a regularizer: %g.' %
                       scale)
    if scale == 0.:
      # A zero scale would make the penalty identically zero, so short-circuit
      # with a no-op regularizer instead of adding graph ops.
      logging.info('Scale of 0 disables regularizer.')
      return lambda _: None

  def l2(weights):
    """Applies L2 regularization to the delta from the initial weights."""
    with ops.name_scope(scope, 'l2_regularizer', [weights]) as name:
      my_scale = ops.convert_to_tensor(scale,
                                       dtype=weights.dtype.base_dtype,
                                       name='scale')
      weight_diff = initial_weights - weights
      return standard_ops.multiply(my_scale, nn.l2_loss(weight_diff), name=name)

  return l2
| 40.278689 | 80 | 0.697192 | 330 | 2,457 | 5.087879 | 0.439394 | 0.035736 | 0.071471 | 0.041096 | 0.097082 | 0.038118 | 0 | 0 | 0 | 0 | 0 | 0.012201 | 0.19943 | 2,457 | 60 | 81 | 40.95 | 0.841383 | 0.462759 | 0 | 0 | 0 | 0 | 0.102201 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.357143 | 0 | 0.535714 | 0.035714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
57e7ef34d1c45fa1d79c4592fe3c1da6b87f07b3 | 115 | py | Python | src/comicsdownload/__init__.py | IaninaK/Download_xkcd | 7578917a1e60c281f815c0bcdca09ae0fcf1eaca | [
"MIT"
] | null | null | null | src/comicsdownload/__init__.py | IaninaK/Download_xkcd | 7578917a1e60c281f815c0bcdca09ae0fcf1eaca | [
"MIT"
] | null | null | null | src/comicsdownload/__init__.py | IaninaK/Download_xkcd | 7578917a1e60c281f815c0bcdca09ae0fcf1eaca | [
"MIT"
] | null | null | null | """comics_download
By IaninaK susie@example.com
Downloads multiple comics from xkcd.com"""
__version__ = '0.1.0'
| 16.428571 | 42 | 0.756522 | 17 | 115 | 4.823529 | 0.823529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029703 | 0.121739 | 115 | 6 | 43 | 19.166667 | 0.782178 | 0.73913 | 0 | 0 | 0 | 0 | 0.208333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
57e8950693c2d70a702448bbf567cedff8db4692 | 929 | py | Python | invoice/migrations/0002_trainer.py | surajraju20/fuzzy1 | 596d39facc657fca49e6630b8149ce3662534a9a | [
"MIT"
] | null | null | null | invoice/migrations/0002_trainer.py | surajraju20/fuzzy1 | 596d39facc657fca49e6630b8149ce3662534a9a | [
"MIT"
] | null | null | null | invoice/migrations/0002_trainer.py | surajraju20/fuzzy1 | 596d39facc657fca49e6630b8149ce3662534a9a | [
"MIT"
] | null | null | null | # Generated by Django 3.1.7 on 2021-04-04 11:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the ``Trainer`` model (auto-generated by Django makemigrations)."""

    dependencies = [
        ('invoice', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Trainer',
            fields=[
                # Auto-incrementing surrogate primary key.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('t_name', models.CharField(max_length=100)),
                # NOTE(review): bank account and phone numbers stored as
                # IntegerField lose leading zeros and may overflow for long
                # numbers -- confirm whether CharField was intended.
                ('acc_no', models.IntegerField()),
                ('ifsc', models.CharField(max_length=100)),
                ('pan', models.CharField(max_length=100)),
                ('bank_name', models.CharField(max_length=100)),
                ('phone_number', models.IntegerField()),
                ('email_id', models.EmailField(max_length=250)),
                ('t_location', models.CharField(max_length=100)),
            ],
        ),
    ]
| 33.178571 | 114 | 0.558665 | 94 | 929 | 5.351064 | 0.574468 | 0.107356 | 0.178926 | 0.238569 | 0.284294 | 0.12326 | 0 | 0 | 0 | 0 | 0 | 0.056662 | 0.297094 | 929 | 27 | 115 | 34.407407 | 0.713629 | 0.048439 | 0 | 0 | 1 | 0 | 0.099773 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.047619 | 0 | 0.190476 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
57eb50ef9b34e6e7ee0f749384922651ed88ea4e | 213 | py | Python | 4-ManipulandoTexto/des24.py | FelipeTellini/Python | 769065f74b7e44d6e31e75a3963178cd89d9afd2 | [
"MIT"
] | null | null | null | 4-ManipulandoTexto/des24.py | FelipeTellini/Python | 769065f74b7e44d6e31e75a3963178cd89d9afd2 | [
"MIT"
] | null | null | null | 4-ManipulandoTexto/des24.py | FelipeTellini/Python | 769065f74b7e44d6e31e75a3963178cd89d9afd2 | [
"MIT"
] | null | null | null | print('\nVerificando as primeiras letras de um texto\n')
cid = str(input('Que cidade você nasceu ? ')).strip()
print(cid[:5].lower() == 'santo')
fim = input('\nCurso de Python no YouTube, canal CURSO EM VIDEO.') | 35.5 | 66 | 0.685446 | 33 | 213 | 4.424242 | 0.878788 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005464 | 0.140845 | 213 | 6 | 66 | 35.5 | 0.79235 | 0 | 0 | 0 | 0 | 0 | 0.598131 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
57f179feaad9956fca6c4e7fd461ff904c59b3cc | 17,454 | py | Python | pickles/migrations/0004_auto_20220320_1107.py | horatius83/thepicklebook | caf02e1c560bfd1ef7077d85d055d9dfdd6e9073 | [
"MIT"
] | null | null | null | pickles/migrations/0004_auto_20220320_1107.py | horatius83/thepicklebook | caf02e1c560bfd1ef7077d85d055d9dfdd6e9073 | [
"MIT"
] | null | null | null | pickles/migrations/0004_auto_20220320_1107.py | horatius83/thepicklebook | caf02e1c560bfd1ef7077d85d055d9dfdd6e9073 | [
"MIT"
] | null | null | null | # Generated by Django 4.0.3 on 2022-03-20 16:07
from django.db import migrations
def seed_pickles(apps, schema_editor):
    """Seed the pickles.Pickle table with the initial (maker, name) catalog.

    Data-migration body for RunPython: looks up the historical model through
    ``apps`` so the fields match the schema as of migration 0003.  The
    ``schema_editor`` argument is unused but required by the RunPython
    callback signature.
    """
    pickles = [
        ("Ahold", "Ahold Pickle Chips Hamburger Dills"),
        ("The Brinery", "The Brinery Pickles Jape Kin Cod"),
        ("Marco Polo", "Marco Polo Home Made Pickled Mushrooms"),
        ("Woodstock", "Woodstock Organic Kosher Dill Pickles Deli Style"),
        ("Ahold", "Ahold Sauerkraut"),
        ("Vlasic", "Vlasic Stackers Bread & Butter Pickles"),
        ("Bell-view", "Bell-view Dill Pickles"),
        ("Ba-tampte", "Ba-tampte Half Sour Pickles"),
        ("Miss Jenny's", "Miss Jenny's Gluten-free Bread & Butter Pickles"),
        ("Silver Floss", "Silver Floss Shredded Sauerkraut"),
        ("Snyder Of Berlin", "Snyder Of Berlin Vlasic Dill Pickle Potato Chips"),
        ("Silver Floss", "Silver Floss Shredded Sauerkraut"),
        ("San Marcos", "San Marcos Pickled Sliced Carrots"),
        ("Deep", "Deep Lime Pickle Sweet"),
        ("Vlasic", "Vlasic Kosher Dill Pickle Spears"),
        ("Lay's", "Lay's Potato Chips Dill Pickle"),
        ("Ahold", "Ahold Shredded Sauerkraut"),
        ("The Brinery", "The Brinery Kimchi Oh Gee"),
        ("Mrs. Wages", "Mrs. Wages Kosher Dill Pickles Quick Process Pickle Mix"),
        ("Aunt Nellie's", "Aunt Nellie's Pickled Beets Whole"),
        ("Ahold", "Ahold Sliced Pickled Beets"),
        ("Boar's Head", "Boar's Head Kosher Dill Pickle Spears"),
        ("Deep", "Mango Titbit (sweet) Katki Pickle"),
        ("Ball", "Ball Bread & Butter Pickle Mix"),
        ("B&g", "B&g Pickle Chips Bread & Butter"),
        ("Puckered Pickle", "Puckered Pickle Co. Kosher Dill Pickle Spears"),
        ("Goya", "Goya Chiles Manzanos Pickled Manzano Peppers"),
        ("Vlasic", "Vlasic Stackers Kosher Dill Pickles"),
        ("Boar's Head", "Boar's Head Sauerkraut"),
        ("Othentic", "Othentic Organic Sauerkraut"),
        ("Bell-view", "Bell-view Sweet Pickles"),
        ("Farm Rich", "Farm Rich Crispy Dill Pickles Breaded Dill Pickle Slices"),
        ("The Brinery", "The Brinery Sauerkraut Stimulus Package"),
        ("Vlasic", "Vlasic Kosher Dill Whole Pickles"),
        ("Ahold", "Ahold Shredded Sauerkraut"),
        ("Reese", "Reese Pickled Whole Baby Corn"),
        ("Aunt Nellie's", "Aunt Nellie's Whole Pickled Baby Beets"),
        ("Ahold", "Ahold Pickle Chips Hamburger Dill"),
        ("Ahold", "Ahold Pickles Whole Sweet Gherkins No Sugar Added"),
        ("Ahold", "Ahold Pickle Chips Kosher Dills"),
        ("Mrs. Wages", "Mrs. Wages Dill Pickles Quick Process Pickle Mix"),
        ("Vlasic", "Vlasic Reduced Sodium Kosher Dill Spears Pickles"),
        ("Cains", "Cains Reduced Sodium Hamburger Pickle Chips"),
        ("Simply Asia", "Simply Asia Singapore Street Noodles Kimchi"),
        ("Bell-view", "Bell-view Polish Pickles Dill"),
        ("Vlasic", "Vlasic Kosher Dill Pickle Spears"),
        ("Giuliano", "Giuliano Specialty Italian Style Dill Pickles"),
        ("Woodstock", "Woodstock Organic Bread & Butter Pickles"),
        ("Claussen", "Claussen Deli-style Hearty Garlic Whole Pickles"),
        ("Greenwood", "Greenwood Whole Pickled Beets Sweet & Tangy"),
        ("Bubbies", "Pickle Dill Pure Koshr"),
        ("Burpee", "Burpee Cucumber Picklebush"),
        ("Sabrett", "Sabrett Sauerkraut"),
        ("Bell-view", "Bell-view Cauliflower Hot Pickled"),
        ("Cains", "Cains No Sugar Sugar Free Sweet Pickle Relish"),
        ("Osem", "Osem Mediterranean Pickles"),
        ("Vlasic", "Vlasic Sweet Pickle Relish"),
        ("Ahold", "Ahold Whole Pickles Kosher Dill"),
        ("Nongshim", "Nongshim Bowl Noodle Soup Spicy Kimchi"),
        ("Goya", "Goya Pickled Sliced Carrots"),
        ("Sushi Chef", "Sushi Chef Pickled Ginger"),
        ("Mrs. Fanning's", "Mrs. Fanning's The Original Bread 'n Butter Pickles"),
        ("Kissling's", "Kissling's Sauerkraut"),
        ("Giuliano", "Pickle Chptle Swt Smky"),
        ("King's", "King's Kimchi Korean Marinated Cabbage Mild"),
        ("Scrumptious Pantry", "Scrumptious Pantry Heirloom Pickles Beaver Dam Peppers"),
        ("Ahold", "Ahold Whole Pickles Kosher Baby Dill"),
        ("Greenwood", "Greenwood Sweet & Tangy Sliced Pickled Beets"),
        ("Libby's", "Libby's Crispy Sauerkraut Jumbo-can"),
        ("Vlasic", "Vlasic Bread & Butter Pickle Chips"),
        ("Bell-view", "Bell-view Fancy Pickled Sweet Beets"),
        ("Vlasic", "Vlasic Reduced Sodium Kosher Dill Stackers Pickles"),
        ("Del Monte", "Del Monte Seasoned Vegetables Pickled Green Beans With Dill Flavor"),
        ("B&g", "B&g New York Deli Style Pickles"),
        ("Ahold", "Ahold Pickle Spears Polish Dills"),
        ("Ball", "Ball Kosher Dill Pickle Mix"),
        ("Claussen", "Claussen Premium Sweet Pickle Relish"),
        ("Vlasic", "Vlasic Kosher Dill Whole Pickles"),
        ("Mt. Olive", "Mt. Olive Bread & Butter Pickles Old-fashioned Sweet"),
        ("Pringles", "Pringles Xtra Potato Crisps Screamin' Dill Pickle"),
        ("Aunt Nellie's", "Aunt Nellie's Pickled Beets Sliced"),
        ("Boar's Head", "Boar's Head Pickle & Pepper Loaf"),
        ("The Brinery", "The Brinery Sauerkraut Storm Cloud Zapper"),
        ("The Brinery", "The Brinery Sauerkraut Fair N' By"),
        ("Miss Jenny's", "Miss Jenny's Habanero Bread & Butter Pickles"),
        ("Ahold", "Ahold Pickle Spears Bread & Butter No Sugar Added"),
        ("Ahold", "Ahold Pickle Chips Bread & Butter"),
        ("Deep", "Deep Amba Haldar Pickle"),
        ("Lancaster Canning Company", "Lancaster Canning Company Pickled Sweet Baby Beets"),
        ("Scrumptious Pantry", "Scrumptious Pantry Heirloom Pickles Bread & Butter Style Cucumbers"),
        ("Lesserevil", "Lesserevil Dill Pickle Chia Crisps Gluten-free"),
        ("Ahold", "Ahold Whole Pickles Sweet Gherkins"),
        ("Greenwood", "Greenwood Sliced Pickled Beets Sweet & Tangy"),
        ("Ahold", "Ahold Shredded Sauerkraut"),
        ("Vlasic", "Vlasic Ovals Hamburger Dill Pickle Chips"),
        ("Cosmo's", "Cosmo's Pickled Eggs"),
        ("Vlasic", "Vlasic Kosher Dill Baby Whole Pickles"),
        ("Silver Floss", "Silver Floss Shredded Sauerkraut"),
        ("Mrs. Wages", "Mrs. Wages Bread & Butter Pickles Quick Process Pickle Mix"),
        ("Bubbies", "Pickle Chip Brd & Bttr"),
        ("Scrumptious Pantry", "Scrumptious Pantry Heirloom Pickled Vegetable Lemon Cucumbers"),
        ("Cains", "Cains Sugar Free Sweet B&b Pickle Chips"),
        ("Puckered Pickle", "Puckered Pickle Co. Natural Spicy Sweet Relish"),
        ("Giuliano", "Pickle Dill Hot Jlpno"),
        ("Loeb's", "Loeb's Pickle Crunch"),
        ("Nabisco", "Nabisco Wheat Thins Snacks Dill Pickle"),
        ("Famous Dave's", "Famous Dave's Pickle Chips Signature Spicy"),
        ("Suckerpunch", "Suckerpunch Pickles Spicy Garlic Originals"),
        ("Ball", "Ball Bread & Butter Pickle Mix"),
        ("Van Holten's", "Van Holten's Dill Pickle Hearty Dill"),
        ("Ball", "Ball Kosher Dill Pickle Mix"),
        ("Sabrett", "Sabrett Sauerkraut"),
        ("Lowell Foods", "Lowell Foods Old Country Style Polish Dill Pickles"),
        ("Kuhne", "Kuhne Traditional German Barrel Sauerkraut"),
        ("Mrs. Fanning's", "Mrs. Fanning's Bread'n Butter Pickles"),
        ("Vlasic", "Vlasic Bread & Butter Pickle Chips"),
        ("Lay's", "Lay's Dill Pickle Flavored Potato Chips"),
        ("Miss Jenny's", "Miss Jenny's Signature Salt & Pepper Pickles"),
        ("Vlasic", "Vlasic Stackers Kosher Dill Pickles"),
        ("Boar's Head", "Boar's Head Kosher Dill Whole Pickles"),
        ("Ahold", "Ahold Pickle Spears Kosher Dill"),
        ("Ahold", "Ahold Sauerkraut"),
        ("B&g", "B&g Gherkins Sweet Pickles"),
        ("Boar's Head", "Boar's Head Sauerkraut"),
        ("Vlasic", "Vlasic Sweet Gherkin Pickles"),
        ("Lowell Foods", "Lowell Foods Dill Pickles With Sliced Peppers"),
        ("Del Monte", "Del Monte Specialties Crinkle Cut Pickled Beets"),
        ("Ahold", "Ahold Pickle Spears Kosher Dills"),
        ("Talk O' Texas", "Talk O' Texas Okra Pickles Crips Mild"),
        ("Silver Floss", "Silver Floss Shredded Sauerkraut"),
        ("Scrumptious Pantry", "Scrumptious Pantry Heirloom Pickles Red Beets With Fresh Thyme"),
        ("Ahold", "Ahold Whole Pickles Sweet Midget"),
        ("Giuliano", "Pickle Dill Kosher Zesty"),
        ("Woodstock", "Woodstock Organic Kosher Dill Pickles Sliced"),
        ("Ahold", "Ahold Whole Pickle Kosher Baby Dills"),
        ("Kuhne", "Kuhne Pickled Red Cabbage"),
        ("Mcclure's", "Mcclure's Kettle Cooked Potato Chips Garlic Dill Pickle"),
        ("Farm Rich", "Farm Rich Crispy Dill Pickles"),
        ("Woodstock", "Woodstock Organic Bite-sized Kosher Baby Dill Pickles"),
        ("Bell-view", "Bell-view Dill Kosher Pickles"),
        ("Hatfield", "Hatfield Old Fashioned Sauerkraut"),
        ("Claussen", "Claussen Easy Squeeze Sweet Pickle Relish"),
        ("Grillo's Pickles", "Grillo's Pickles Italian Dills Hot"),
        ("Lowell Foods", "Lowell Foods Polish Dill Pickles With Sweet Peppers"),
        ("Oscar Mayer", "Oscar Mayer Pickle & Pimiento Loaf"),
        ("Vlasic", "Vlasic Kosher Dill Baby Whole Pickles"),
        ("Ahold", "Ahold Pickles Whole Kosher Dill"),
        ("Bell-view", "Bell-view Cauliflower Hot Pickled"),
        ("A-grosik", "A-grosik Dill Pickles In Brine All Natural"),
        ("Cains", "Cains Sweet Pickle Relish"),
        ("Ahold", "Ahold Pickle Chips Bread & Butter"),
        ("Cains", "Cains Balasamic Munchers Pickles"),
        ("Old El Paso", "Old El Paso Pickled Jalapeno Slices"),
        ("Ahold", "Ahold Bread & Butter Pickle Chips No Sugar Added"),
        ("Wildbrine", "Sauerkraut Dill & Grlc"),
        ("Wildbrine", "Sauerkraut Arame & Gngr"),
        ("Wildbrine", "Sauerkraut Curry & Clflwr"),
        ("Wildbrine", "Sauerkraut Beet & Cabbage"),
        ("Wildbrine", "Horseradish Kimchi Miso"),
        ("Wildbrine", "Kimchi Korean"),
        ("Wildbrine", "Kimchi Thai"),
        ("Wildbrine", "Pickle Slc Dill"),
        ("King Crunch", "Pickle Dill Spear Kosher"),
        ("Wonderfully Raw", "Snip Chip Dill Pickle Org"),
        ("Gefen", "Pickle Sliced"),
        ("Krakus", "Sauerkraut & Carrot"),
        ("Krakus", "Sauerkraut"),
        ("Krakus", "Pickle Dill Polish"),
        ("Ricks Picks", "Pickle K O Org"),
        ("Kasias", "Pierogi Sauerkraut"),
        ("Annie Chuns", "Soup Bowl Kimchi"),
        ("Crosse & Blackwell", "Branston Pickle Orgnl"),
        ("Paldo", "Noodle Bowl Kimchi"),
        ("Othentic", "Pickle Dill Polish"),
        ("Othentic", "Pickle Dill Spear Swt & S"),
        ("Othentic", "Sauerkraut Alntrl"),
        ("Othentic", "Pickle Dill Polish Org"),
        ("Othentic", "Pickle Jewish Deli Styl Org"),
        ("Othentic", "Pickle Spear Swt&spcy Org"),
        ("Biotta", "Juice Sauerkraut"),
        ("Crosse & Blackwell", "Branston Pickle"),
        ("Ricks Picks", "Bean Grn Mean Pickled"),
        ("Ricks Picks", "Beet Phat Pickled"),
        ("Ricks Picks", "Smokra Pickled"),
        ("Conscious Choice", "Pickle Beer Orgnl"),
        ("Ba Tampte", "Sauerkraut Jar"),
        ("Ba Tampte", "Sauerkraut Cello"),
        ("Ba Tampte", "Pickle Half Sour"),
        ("Conscious Choice", "Pickle Dill Frances Cellar"),
        ("Conscious Choice", "Pickle Dill Harold Purdy Hot"),
        ("Conscious Choice", "Pickle Dill Harold Dern Hot"),
        ("Boscoli", "Asparagus Spicy Pickled"),
        ("Boscoli", "Garlic Spicy Pickled"),
        ("Boscoli", "Bean Spicy Pickled"),
        ("A Grosik", "Pickle Polish Dill"),
        ("A Grosik", "Sauerkraut"),
        ("A Grosik", "Pickle Gherkin"),
        ("A Grosik", "Pickle Polish Dill"),
        ("Wickles", "Pickle Orgnl"),
        ("Othentic", "Sauerkraut"),
        ("Bigs", "Seed Snflwr Vlasic Dill Pickle"),
        ("Puckered Pickle", "Pickle Spears 300pc"),
        ("Ricks Picks", "Pickle Peoples Grlc Dill"),
        ("Ricks Picks", "Pickle Classic Sours"),
        ("Lynnaes Gourmet", "Pickle Chip"),
        ("Lynnaes Gourmet", "Pickle Chip Hot Mama"),
        ("Lynnaes Gourmet", "Pickle Spear Hot Mama"),
        ("Puckered Pickle", "Pickle Spear Spcy Deli"),
        ("Martha Stewart", "Pickle Spear Snckng"),
        ("Martha Stewart", "Pickle Chip Zesty Hot"),
        ("Martha Stewart", "Okra Pickled"),
        ("Martha Stewart", "Pickle Chip Bread&bttr"),
        ("Kuhne", "Pickle Sandwich Slices"),
        ("Kuhne", "Pickle Gherkin Grmt Premi"),
        ("Del Monte", "Pickle Clsc Bread & Bttr"),
        ("Del Monte", "Pickle Chunk Hot&swt"),
        ("Del Monte", "Pickle Dill Grndma Baby"),
        ("Del Monte", "Pickle Spear Kshr Dill Gr"),
        ("Del Monte", "Pickle Chunk Dill Grlc"),
        ("Food Should Taste Good", "Chip Tortla Kimchi"),
        ("Liebers", "Pickle Dill"),
        ("Liebers", "Pickle Gehrkin Dill"),
        ("Galil", "Pickle 7-9 Xvngr"),
        ("Sechlers", "Pickle Candied Swt Apple"),
        ("Sechlers", "Pickle Swt Mixed Heat"),
        ("Mrs Wages", "Pickle Barrel Dsp 72pc"),
        ("Pataks", "Pickled Garlic"),
        ("Franks", "Sauerkraut Sngl Dsp 144pc"),
        ("Puckered Pickle", "Pickle Spear Kosher"),
        ("Puckered Pickle", "Pickle Whole Baby Dill"),
        ("Bubbies", "Pickle Relish Kosher Dill"),
        ("Cracovia", "Pickle Dill Polish"),
        ("Cracovia", "Sauerkraut"),
        ("Osem", "Pickle Meditern 18-25"),
        ("Dolores", "Pork Rinds Pickled"),
        ("Dolores", "Pigs Feet Pickled"),
        ("Tillen Farms", "Asparagus Pickled"),
        ("Franks", "Sauerkraut Hot Dog Dsp 54pc"),
        ("Franks", "Sauerkraut Juice"),
        ("Mrs Wages", "Mix Pickle 4flvr Dsp 72pc"),
        ("Pataks", "Relish Pickled Mixed"),
        ("Hengstenberg", "Pickle Polish"),
        ("Pataks", "Pickled Brinjal"),
        ("Tiffes", "Okra Pickled Mild"),
        ("Tiffes", "Okra Pickled Hot"),
        ("Hengstenberg", "Pickle Gherkin Knax"),
        ("Nong Shim", "Noodle Inst Kimchi Ramyun"),
        ("Osem", "Pickle Meditern 7-9"),
        ("Galil", "Pickle Cucumber 18-25"),
        ("Galil", "Pickle Cucumber 7-9"),
        ("Cracovia", "Beets Baby Pickled"),
        ("Cracovia", "Sauerkraut With Carrot"),
        ("Ba Tampte", "Sauerkraut Cello"),
        ("Miss Jennys Pickles", "Pickle Salt & Pppr Jlpno"),
        ("Tony Packos", "Packo Thin Slcd Pickles &"),
        ("Tony Packos", "Pickle & Pepper Orig"),
        ("Tony Packos", "Pickle Ppr Swt Hot"),
        ("Roland", "Pickle Dill Chip"),
        ("Talk O Texas", "Okra Pickled Hot"),
        ("Tony Packos", "Packo Pickle & Pepper Rel"),
        ("Kuhne", "Pickle Garlic Barrel"),
        ("Santa Barbara", "Asparagus Pickled Hot"),
        ("Franks", "Sauerkraut Can"),
        ("Conscious Choice", "Pickle Sissy Swt Harolds"),
        ("Mrs Wages", "Mix Pickle Med Spcy"),
        ("Mrs Wages", "Mix Pickle Brd N Btr Zsty"),
        ("Manischewitz", "Sauerkraut Po"),
        ("Ziyad", "Turnip Pickled"),
        ("Sechlers", "Pickle Brd & Btr Zsty Chnk"),
        ("Kuhne", "Sauerkraut Barrel"),
        ("Hengstenberg", "Pickle Gherkin Knax Lrg"),
        ("Pataks", "Pickled Mango Xhot"),
        ("Pataks", "Pickled Chile"),
        ("Hengstenberg", "Sauerkraut Bavarian"),
        ("Kuhne", "Pickle Barrel"),
        ("Franks", "Sauerkraut Can"),
        ("Pataks", "Pickled Lime Mild"),
        ("Pataks", "Pickled Mango Medm"),
        ("Tabasco", "Pickle Hot & Sweet"),
        ("Tabasco", "Okra Pickled Spicy"),
        ("Sechlers", "Pickle Swt Diced Salad"),
        ("Sechlers", "Pickle Candied Swt Mix"),
        ("Sechlers", "Pickle Candied Swt Orng Strip"),
        ("Sechlers", "Pickle Candied Swt Dill Strip"),
        ("Sechlers", "Pickle Swt"),
        ("Sechlers", "Relish Pickle Swt"),
        ("Sechlers", "Relish Dill Pickle"),
        ("Sechlers", "Pickle Chip Med Swt"),
        ("Sechlers", "Pickle Candied Swt Mixed"),
        ("Sechlers", "Pickle Candied Swt Raisin"),
        ("Sechlers", "Pickle Dill Genuine No Grlc"),
        ("Sechlers", "Pickle Candied Swt Gherkin"),
        ("Sechlers", "Pickle Candied Swt Orng Chnks"),
        ("Sechlers", "Pickle Brd & Btr Slice"),
        ("Marquis", "Pickle Gherkin"),
        ("Sechlers", "Pickle Cndyd Swt Dill Chn"),
        ("Van Holtens", "Pickle Dill Mild"),
        ("Van Holtens", "Pickle Dill King Sz"),
        ("Amish Wedding", "Beet Pickled Baby"),
        ("Amish Wedding", "Pickle Bread & Butter"),
        ("Bubbies", "Sauerkraut"),
        ("Ba Tampte", "Tomato Pickled"),
        ("Ba Tampte", "Sauerkraut Jar"),
        ("Ba Tampte", "Pickle Bread & Butter"),
        ("Ba Tampte", "Pickle Deli Whole"),
        ("Ba Tampte", "Pickle Deli Halve"),
        ("Mrs Wages", "Mix Pickle 6flvr Dsp 80pc"),
        ("Van Holtens", "Pickle Hot Dill"),
        ("Van Holtens", "Pickle Sour Dill"),
        ("Van Holtens", "Pickle Kosher Deli"),
        ("Van Holtens", "Pickle Hot Mama"),
        ("Ba Tampte", "Pickle Garlic Dill"),
        ("Franks", "Sauerkraut Sngl")
    ]
    Pickle = apps.get_model('pickles', 'Pickle')
    # One bulk INSERT instead of one save()/query per row (313 rows).
    Pickle.objects.bulk_create(
        Pickle(name=name, maker=maker) for (maker, name) in pickles
    )
class Migration(migrations.Migration):
    """Data migration: seed the initial pickle catalog."""

    dependencies = [
        ('pickles', '0003_pickle_maker'),
    ]
    operations = [
        # noop reverse makes the data migration unapplied-able
        # (previously it was irreversible).
        migrations.RunPython(seed_pickles, migrations.RunPython.noop)
    ]
| 52.257485 | 101 | 0.590982 | 1,914 | 17,454 | 5.386102 | 0.239289 | 0.022311 | 0.00873 | 0.018625 | 0.24338 | 0.166845 | 0.061112 | 0.025318 | 0.01067 | 0.01067 | 0 | 0.003762 | 0.253753 | 17,454 | 333 | 102 | 52.414414 | 0.787716 | 0.002578 | 0 | 0.118902 | 1 | 0 | 0.654507 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003049 | false | 0 | 0.003049 | 0 | 0.015244 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
17b5095261091f2499a912f3775fe5b85a5453bf | 2,959 | py | Python | profiles/models.py | geoanalytic/geopap-rest | 44b2d5e65fbabd5d61884dc6a21294ae4f09d0dd | [
"MIT"
] | null | null | null | profiles/models.py | geoanalytic/geopap-rest | 44b2d5e65fbabd5d61884dc6a21294ae4f09d0dd | [
"MIT"
] | null | null | null | profiles/models.py | geoanalytic/geopap-rest | 44b2d5e65fbabd5d61884dc6a21294ae4f09d0dd | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.postgres.fields import ArrayField
# Geopaparazzi profiles model
class Project(models.Model):
    """A Geopaparazzi project entry: a file path plus download/upload URLs."""
    path = models.CharField(max_length=100, blank=True, default='')
    modifieddate = models.DateTimeField(auto_now_add=True)  # set once at creation; auto_now_add never updates afterwards
    url = models.URLField(blank=True)
    uploadurl = models.URLField(blank=True)
    def __str__(self):
        return self.path
class Tag(models.Model):
    """A tags file entry: a file path plus its URL."""
    path = models.CharField(max_length=100, blank=True, default='')
    modifieddate = models.DateTimeField(auto_now_add=True)  # creation timestamp (auto_now_add: set once)
    url = models.URLField(blank=True)
    def __str__(self):
        return self.path
class Basemap(models.Model):
    """A basemap file entry: path, URL and file size."""
    path = models.CharField(max_length=100, blank=True, default='')
    modifieddate = models.DateTimeField(auto_now_add=True)  # creation timestamp (auto_now_add: set once)
    url = models.URLField(blank=True)
    size = models.CharField(max_length=30)  # size stored as free-form text, not a number
    def __str__(self):
        return self.path
class Spatialitedbs(models.Model):
    """A SpatiaLite database entry: path, download/upload URLs, size, and
    a list of names in ``visible``."""
    path = models.CharField(max_length=100, blank=True, default='')
    modifieddate = models.DateTimeField(auto_now_add=True)  # creation timestamp (auto_now_add: set once)
    url = models.URLField(blank=True)
    size = models.CharField(max_length=30)  # size stored as free-form text
    uploadurl = models.URLField(blank=True)
    # PostgreSQL-only ArrayField; presumably the visible layer/table names — confirm with client code
    visible = ArrayField(models.CharField(max_length=30))
    def __str__(self):
        return self.path
class Otherfiles(models.Model):
    """A miscellaneous file entry: a file path plus its URL."""
    path = models.CharField(max_length=100, blank=True, default='')
    modifieddate = models.DateTimeField(auto_now_add=True)  # creation timestamp (auto_now_add: set once)
    url = models.URLField(blank=True)
    def __str__(self):
        return self.path
class Profile(models.Model):
    """A named profile bundling a project, tags, basemaps, SpatiaLite
    databases and other files, plus client display settings."""
    name = models.CharField(max_length=100, blank=True, default='')
    description = models.TextField()
    creationdate = models.DateTimeField(auto_now_add=True)
    modifieddate = models.DateTimeField(auto_now_add=True)  # NOTE(review): auto_now_add never updates; auto_now may have been intended for a "modified" stamp — confirm
    color = models.CharField(max_length=30, default="#FBC02D")
    active = models.BooleanField(default=False)
    sdcardPath = models.CharField(max_length=100, default="MAINSTORAGE")
    mapView = models.CharField(max_length=100, default="52.02025604248047,-115.70208740234375,10.0")  # default looks like "lat,lon,zoom" in one string — confirm format
    project = models.ForeignKey(Project, on_delete = models.SET_NULL, blank=True, null=True)
    tags = models.ForeignKey(Tag, on_delete = models.SET_NULL, blank=True, null=True)
    basemaps = models.ManyToManyField(Basemap, blank=True)
    spatialitedbs = models.ManyToManyField(Spatialitedbs, blank=True)
    otherfiles = models.ManyToManyField(Otherfiles, blank=True)
    def __str__(self):
        return self.name
    class Meta:
        # Default queryset order: earliest modification first, then name.
        ordering = ('modifieddate', 'name', )
class ProfileSet(models.Model):
    """The collection of profiles belonging to one user (one set per user)."""
    # Lazy string reference to the users app's user model avoids an import cycle.
    owner = models.OneToOneField('users.user', related_name='profilesets', on_delete=models.CASCADE)
    profiles = models.ManyToManyField(Profile, blank=True)
    formatVersion = models.FloatField(default=1.1)  # version of the serialized profile-set format
    def __str__(self):
        return self.owner.username
| 37.935897 | 100 | 0.711727 | 353 | 2,959 | 5.796034 | 0.232295 | 0.083578 | 0.105572 | 0.140762 | 0.625611 | 0.583089 | 0.533724 | 0.497556 | 0.47654 | 0.439394 | 0 | 0.029508 | 0.175397 | 2,959 | 77 | 101 | 38.428571 | 0.809016 | 0.009125 | 0 | 0.52459 | 0 | 0 | 0.033106 | 0.014334 | 0 | 0 | 0 | 0 | 0 | 1 | 0.114754 | false | 0 | 0.032787 | 0.114754 | 0.983607 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
17b853b1dd51da041d04308736e8130c1c17033c | 616 | py | Python | testlocaleurl/testapp/urls.py | kron4eg/django-localeurl | 50da1a4226d067c54bc700088158beab3d33040a | [
"MIT"
] | 1 | 2016-05-08T21:37:28.000Z | 2016-05-08T21:37:28.000Z | testlocaleurl/testapp/urls.py | kron4eg/django-localeurl | 50da1a4226d067c54bc700088158beab3d33040a | [
"MIT"
] | null | null | null | testlocaleurl/testapp/urls.py | kron4eg/django-localeurl | 50da1a4226d067c54bc700088158beab3d33040a | [
"MIT"
] | null | null | null | from django.conf.urls.defaults import *
# Legacy Django URLconf: string-prefixed ``patterns()`` with the
# ``direct_to_template`` generic view — both removed in modern Django
# (patterns() in 1.10, direct_to_template in 1.5+); this file targets old releases.
urlpatterns = patterns('django.views.generic.simple',
    (r'^$', 'direct_to_template', {'template': 'test.html'}),
    (r'^independent/', 'direct_to_template', {'template': 'test.html'}),
    (r'^locale_url/$', 'direct_to_template', {'template': 'locale_url.html'}),
    (r'^chlocale/$', 'direct_to_template', {'template': 'chlocale.html'}),
    (r'^rmlocale/$', 'direct_to_template', {'template': 'rmlocale.html'}),
)
# Dummy views from the test app; ``dummy1`` captures everything after
# ``dummy/`` into the ``test`` keyword argument.
urlpatterns += patterns('testapp.views',
    url(r'^dummy/$', 'dummy', name='dummy0'),
    url(r'^dummy/(?P<test>.+)$', 'dummy', name='dummy1'),
)
| 41.066667 | 79 | 0.625 | 71 | 616 | 5.253521 | 0.394366 | 0.107239 | 0.214477 | 0.321716 | 0.176944 | 0.176944 | 0.176944 | 0 | 0 | 0 | 0 | 0.003697 | 0.121753 | 616 | 14 | 80 | 44 | 0.685767 | 0 | 0 | 0 | 0 | 0 | 0.534091 | 0.043831 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.083333 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
17c555d6dfb6b578b3139052af5b6ca46ddad3a9 | 369 | py | Python | schemas/product_type.py | AndreMPCosta/backend-ecommerce | 002666a424c2c6d294d7e50a9e8269ae75ab87ce | [
"MIT"
] | 1 | 2021-12-05T06:41:39.000Z | 2021-12-05T06:41:39.000Z | schemas/product_type.py | AndreMPCosta/backend-ecommerce | 002666a424c2c6d294d7e50a9e8269ae75ab87ce | [
"MIT"
] | null | null | null | schemas/product_type.py | AndreMPCosta/backend-ecommerce | 002666a424c2c6d294d7e50a9e8269ae75ab87ce | [
"MIT"
] | null | null | null | from marshmallow.fields import Nested
from marshmallow_mongoengine import ModelSchema
from models.product_type import ProductTypeModel
class ProductTypeSchema(ModelSchema):
    """marshmallow-mongoengine schema for ProductTypeModel.

    ``translations`` is excluded both on the model itself and on each
    nested attribute.
    """
    # __nested__ = False
    class Meta:
        model = ProductTypeModel
        exclude = ('translations',)
    # Nested schema referenced by name (string) — presumably to avoid a circular import; confirm
    attributes = Nested('AttributeSchema', many=True, exclude=('translations',))
| 24.6 | 80 | 0.747967 | 34 | 369 | 7.941176 | 0.647059 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173442 | 369 | 14 | 81 | 26.357143 | 0.885246 | 0.04878 | 0 | 0 | 0 | 0 | 0.111748 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.375 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
17cc2628f973c4030d96d13368fdcd4a65c5ac74 | 2,331 | py | Python | twkit/curation/purgeid.py | evaperon/twAwler | 8e9f2064cad846177ed6547b9f56f053226a2d5e | [
"Apache-2.0"
] | 5 | 2018-12-06T16:14:14.000Z | 2020-05-22T07:36:45.000Z | twkit/curation/purgeid.py | evaperon/twAwler | 8e9f2064cad846177ed6547b9f56f053226a2d5e | [
"Apache-2.0"
] | null | null | null | twkit/curation/purgeid.py | evaperon/twAwler | 8e9f2064cad846177ed6547b9f56f053226a2d5e | [
"Apache-2.0"
] | 3 | 2020-04-20T07:20:18.000Z | 2021-08-19T17:31:38.000Z | #!/usr/bin/python3
# -*- coding: utf-8 -*-
###########################################
# (c) 2016-2020 Polyvios Pratikakis
# polyvios@ics.forth.gr
###########################################
"""
This tool can completely purge a user from the database.
Warning: This may reduce information on other users, as the given user
is purged from all mined relations, too.
"""
import optparse
from datetime import datetime
#from py2neo import Graph, Node, Relationship
from twkit.utils import *
#from graph import *
def del_userid(db, graph, uid):
    """Purge every trace of user ``uid`` from the Mongo database.

    Prints, one per line and in a fixed order, the number of documents
    removed from each collection.  ``graph`` is accepted for interface
    compatibility but unused (the neo4j purge below is disabled).
    """
    #print(graph.evaluate("MATCH (n) where n.id_str = {} detach delete n".format(uid)))
    deletions = [
        (db.cemetery, {'id': uid}),
        (db.crawlerdata, {'id': uid}),
        (db.favorites, {'user_id': uid}),
        (db.follow, {'id': uid}),
        (db.follow, {'follows': uid}),
        (db.following, {'id': uid}),
        (db.greeks, {'id': uid}),
        (db.ignored, {'id': uid}),
        (db.groups, {'id': uid}),
        (db.lastscan, {'id': uid}),
        (db.listmembers, {'user_id': uid}),
        (db.listsubscribers, {'user_id': uid}),
        (db.protected, {'id': uid}),
        (db.suspended, {'id': uid}),
        (db.users, {'id': uid}),
        (db.uservectors, {'id': uid}),
        # Tweets are only removed when not in the configured language.
        (db.tweets, {'user.id': uid, 'lang': {'$ne': config.lang}}),
    ]
    for collection, criteria in deletions:
        print(collection.delete_many(criteria).deleted_count)
if __name__ == '__main__':
  parser = optparse.OptionParser(usage=u'Usage: %prog [options] <user>')
  parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="Make noise")
  parser.add_option("--id", action="store_true", dest="ids", default=False, help="Argument is user id")
  (options, args) = parser.parse_args()
  # Only the database connection is needed for purging; skip the Twitter API.
  db, _ = init_state(ignore_api=True)
  #graph = Graph("http://neo4j:twittergr@localhost:7474/db/data/")
  graph= None
  now = datetime.utcnow()  # NOTE(review): computed but never used below
  # Normalize arguments: lowercase and strip any "@" characters.
  userlist = [x.lower().replace("@","") for x in args]
  for user in userlist:
    # NOTE(review): assumes a numeric id — a screen name raises ValueError here,
    # even though the --id flag (options.ids) is parsed but never checked.
    uid = int(user)
    del_userid(db, graph, uid)
| 39.508475 | 109 | 0.692407 | 332 | 2,331 | 4.692771 | 0.394578 | 0.07638 | 0.154044 | 0.205392 | 0.399872 | 0.349166 | 0.349166 | 0.349166 | 0.109114 | 0 | 0 | 0.00763 | 0.100386 | 2,331 | 58 | 110 | 40.189655 | 0.735336 | 0.202488 | 0 | 0 | 0 | 0 | 0.101309 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030303 | false | 0 | 0.090909 | 0 | 0.121212 | 0.515152 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
17cd1dd4fb9a38a14a16337ee5ce8b8762a3419b | 235 | py | Python | test/testAddGroup.py | Valeryiar/myPython | 5961e3e9055e93f7a964eafc243925fbbb0f9f86 | [
"Apache-2.0"
] | null | null | null | test/testAddGroup.py | Valeryiar/myPython | 5961e3e9055e93f7a964eafc243925fbbb0f9f86 | [
"Apache-2.0"
] | null | null | null | test/testAddGroup.py | Valeryiar/myPython | 5961e3e9055e93f7a964eafc243925fbbb0f9f86 | [
"Apache-2.0"
] | null | null | null |
from model.group import Group
def testAddGroup(app):
app.session.login( username="admin", password= "secret")
app.group.create( Group (name="name", header="header", footer="footer"))
app.session.logout()
| 18.076923 | 80 | 0.646809 | 28 | 235 | 5.428571 | 0.642857 | 0.131579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 235 | 12 | 81 | 19.583333 | 0.808511 | 0 | 0 | 0 | 0 | 0 | 0.117904 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0.2 | 0.2 | 0 | 0.4 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
17eb3e77fb5d5d898c8a72e94607ea49f8dfc22f | 1,981 | py | Python | data_logging/JSON_lines_record_logger.py | NREL/K_Road | ec8049cf1b81c58dd3b95f8298a362d863cd4a68 | [
"BSD-3-Clause"
] | 1 | 2021-04-19T23:28:26.000Z | 2021-04-19T23:28:26.000Z | data_logging/JSON_lines_record_logger.py | NREL/K_Road | ec8049cf1b81c58dd3b95f8298a362d863cd4a68 | [
"BSD-3-Clause"
] | null | null | null | data_logging/JSON_lines_record_logger.py | NREL/K_Road | ec8049cf1b81c58dd3b95f8298a362d863cd4a68 | [
"BSD-3-Clause"
] | null | null | null | import json
import numpy
from .record_logger import RecordLogger
class JSONLinesRecordLogger(RecordLogger):
"""
Writes data to a JSONLines formatted log file. Each call to write() writes a new JSON object on a new line.
"""
def __init__(self, filename) -> None:
self._file = open(filename, 'w', encoding='utf-8')
def write(self, data) -> None:
# noinspection PyBroadException
def object_converter(obj):
if isinstance(obj, numpy.ndarray):
# convert numpy arrays to lists
return obj.tolist()
elif isinstance(obj, numpy.float32) or \
isinstance(obj, numpy.float64) or \
isinstance(obj, numpy.int8) or \
isinstance(obj, numpy.int16) or \
isinstance(obj, numpy.int32) or \
isinstance(obj, numpy.int64) or \
isinstance(obj, numpy.uint8) or \
isinstance(obj, numpy.uint16) or \
isinstance(obj, numpy.uint32) or \
isinstance(obj, numpy.uint64) or \
isinstance(obj, numpy.intp) or \
isinstance(obj, numpy.uintp):
# convert numpy numbers to python numbers
return obj.item()
try:
return obj.toJSON()
except:
return obj.__dict__
self._file.write(json.dumps(
data,
ensure_ascii=False,
indent=None,
separators=(',', ':'),
default=object_converter))
self._file.write('\n')
def flush(self) -> None:
self._file.flush()
def close(self) -> None:
# noinspection PyBroadException
try:
if hasattr(self, 'file') and self._file is not None and not self._file.closed:
self._file.close()
except:
pass
| 33.016667 | 111 | 0.520949 | 200 | 1,981 | 5.065 | 0.415 | 0.166831 | 0.230997 | 0.217177 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015651 | 0.387178 | 1,981 | 59 | 112 | 33.576271 | 0.818781 | 0.120141 | 0 | 0.095238 | 0 | 0 | 0.008121 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.119048 | false | 0.02381 | 0.071429 | 0 | 0.309524 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
17f18725e41071d3030efbdfd3b92e6c740ddda5 | 2,120 | py | Python | tests/whist/server/api/game/test_create.py | Whist-Team/Whist-Server | 7173bba9cbae505e189c812ebb459af661d656b2 | [
"MIT"
] | 1 | 2021-08-09T21:15:23.000Z | 2021-08-09T21:15:23.000Z | tests/whist/server/api/game/test_create.py | Whist-Team/Whist-Server | 7173bba9cbae505e189c812ebb459af661d656b2 | [
"MIT"
] | 172 | 2021-04-18T21:09:56.000Z | 2022-03-31T20:38:07.000Z | tests/whist/server/api/game/test_create.py | Whist-Team/Whist-Server | 7173bba9cbae505e189c812ebb459af661d656b2 | [
"MIT"
] | 2 | 2021-08-28T19:05:55.000Z | 2022-03-20T17:50:29.000Z | from tests.whist.server.api.game.base_token_case import TestCaseWithToken
from whist.server.database import db
from whist.server.database.game import GameInDb
class CreateGameTestCase(TestCaseWithToken):
def test_post_game(self):
data = {'game_name': 'test', 'password': 'abc'}
response = self.client.post(url='/game/create', json=data, headers=self.headers)
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertTrue('game_id' in response.json())
self.assertEqual(1, db.game.estimated_document_count())
def test_post_game_without_pwd(self):
data = {'game_name': 'test'}
response = self.client.post(url='/game/create', json=data, headers=self.headers)
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertTrue('game_id' in response.json())
self.assertEqual(1, db.game.estimated_document_count())
def test_post_game_without_name(self):
data = {'password': 'abc'}
response = self.client.post(url='/game/create', json=data, headers=self.headers)
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual('"game_name" is required.', response.json()['detail'])
self.assertEqual(0, db.game.estimated_document_count())
def test_post_game_with_settings(self):
data = {'game_name': 'test', 'password': 'abc', 'min_player': 1, 'max_player': 1}
response = self.client.post(url='/game/create', json=data, headers=self.headers)
game = GameInDb(**db.game.find()[0])
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertTrue('game_id' in response.json())
self.assertEqual(1, db.game.estimated_document_count())
self.assertEqual(1, game.table.min_player, msg=f'min_player is {game.table.min_player} '
f'instead of 1')
self.assertEqual(1, game.table.max_player, msg=f'max_player is {game.table.max_player} '
f'instead of 1')
| 53 | 96 | 0.660377 | 270 | 2,120 | 5.025926 | 0.222222 | 0.121592 | 0.058954 | 0.044215 | 0.690494 | 0.613854 | 0.613854 | 0.576271 | 0.576271 | 0.544584 | 0 | 0.013658 | 0.20566 | 2,120 | 39 | 97 | 54.358974 | 0.792162 | 0 | 0 | 0.454545 | 0 | 0 | 0.137264 | 0.021698 | 0 | 0 | 0 | 0 | 0.424242 | 1 | 0.121212 | false | 0.090909 | 0.090909 | 0 | 0.242424 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
17fb7f3c1cd9de2547b73c41f5052f4dc8e04323 | 7,160 | py | Python | twitter.py | casheww/HackerDog8 | de90a40518ed207a7374c2e4cb7936e98a1eea81 | [
"MIT"
] | null | null | null | twitter.py | casheww/HackerDog8 | de90a40518ed207a7374c2e4cb7936e98a1eea81 | [
"MIT"
] | 1 | 2021-08-01T13:40:12.000Z | 2021-08-01T13:40:12.000Z | twitter.py | casheww/HackerDog8 | de90a40518ed207a7374c2e4cb7936e98a1eea81 | [
"MIT"
] | 2 | 2021-08-01T13:28:45.000Z | 2021-08-02T07:14:57.000Z | from os import replace
from typing import TYPE_CHECKING, Optional
import tweepy
import random
PaVerb =["clapped","slapped","permited", "climbed", "destroyed", "corrupted", "snuck","flipped","flew","beat", "sniffled", "barked", "prowled", "growled", ""]
PrVerb = ["running ^pnoun","walking ^mnoun","permitting ^mnoun","gaming","breaking ^noun"]
Pverb=["slaps","permits","claps","runs","stands","crys","brakes","breaths","eats","creeps","likes ^mnoun","loves ^mnoun"]
Describe=["red","nice", "sad", "hot", "trendy", "wet", "damp", "broken","tasty","unhappy","yellow","green","purple","defiant","different","cute","lucky","victorious","adventureus","obvious","orange","euphoric","exemplary","angry"]
Adverb=["abnormally","absentmindedly","accidentally","actually","adventurously","afterwards","almost","always","annually","anxiously","arrogantly","awkwardly","bashfully","beautifully","bitterly","bleakly","blindly","blissfully","boastfully","boldly","bravely","briefly","brightly","briskly","broadly","busily","calmly","carefully","carelessly","cautiously","certainly","cheerfully","clearly","cleverly","closely","coaxingly","colorfully","commonly","continually","coolly","correctly","courageously","crossly","cruelly","curiously","daily","daintily","dearly","deceivingly","deeply","defiantly","deliberately","delightfully","diligently","dimly","doubtfully","dreamily","easily","elegantly","energetically","enormously","enthusiastically","equally","especially","even","evenly","eventually","exactly","excitedly","extremely","fairly","faithfully","famously","far","fast","fatally","ferociously","fervently","fiercely","fondly","foolishly","fortunately","frankly","frantically","freely","frenetically","frightfully","fully","furiously","generally","generously","gently","gladly","gleefully","gracefully","gratefully","greatly","greedily","happily","hastily","healthily","heavily","helpfully","helplessly","highly","honestly","hopelessly","hourly","hungrily","immediately","innocently","inquisitively","instantly","intensely","intently","interestingly","inwardly","irritably","jaggedly","jealously","jovially","joyfully","joyously","jubilantly","judgmentally","justly","keenly","kiddingly","kindheartedly","kindly","knavishly","knowingly","knowledgeably","kookily","lazily","les","lightly","likely","limply","lively","loftily","longingly","loosely","loudly","lovingly","loyally","madly","majestically","meaningfully","mechanically","merrily","miserably","mockingly","monthly","more","mortally","mostly","mysteriously","naturally","nearly","neatly","nervously","never","nicely","noisily","not","obediently","obnoxiously","oddly","offensively","officially","often","only","openly","optimistically","overc
onfidently","painfully","partially","patiently","perfectly","physically","playfully","politely","poorly","positively","potentially","powerfully","promptly","properly","punctually","quaintly","queasily","questionably","quicker","quickly","quietly","quirkily","quizzically","randomly","rapidly","rarely","readily","really","reassuringly","recklessly","regularly","reluctantly","repeatedly","reproachfully","restfully","righteously","rigidly","roughly","rudely","safely","scarcely","scarily","searchingly","sedately","seemingly","seldom","selfishly","separately","seriously","shakily","sharply","sheepishly","shrilly","shyly","silently","sleepily","slowly","smoothly","softly","solemnly","solidly","sometimes","soon","speedily","stealthily","sternly","strictly","successfully","suddenly","supposedly","surprisingly","suspiciously","sweetly","swiftly","sympathetically","tenderly","tensely","terribly","thankfully","thoroughly","thoughtfully","tightly","tomorrow","too","tremendously","triumphantly","truly","truthfully","ultimately","unabashedly","unaccountably","unbearably","unethically","unexpectedly","unfortunately","unimpressively","unnaturally","unnecessarily","upbeat","upright","upside-down","upward","urgently","usefully","uselessly","usually","utterly","vacantly","vaguely","vainly","valiantly","vastly","verbally","very","viciously","victoriously","violently","vivaciously","voluntarily","warmly","weakly","wearily","well","wetly","wholly","wildly","willfully","wisely","woefully","wonderfully","worriedly","wrongly","yawningly","yearly","yearningly","yesterday","yieldingly","youthfully","zealously","zestfully","zestily","drippaly"]
Pnoun=["Charlie", "Italy", "Japan", "Da Vinchi", "Harry Potter", "Rock", "him", "her", "them", "it", "i", "^pnoun and ^pnoun", "me", "she", "he","( ͡° ͜ʖ ͡°)"]
Noun=["beef", "girl", "dust", "kettle", "Legend of Zelda","phone", "Bone", "Leash", "burger", "boy", "dice", "love", "destruction", "loyalty", "corruption", "disease","Harry Potter and the Deathly Hallows Part 4 on DVD"]
Sentens=["i like ^noun", "much ^adj", "The best game is The ^noun", "^noun is pretty ^adj", "i like ^noun and ^noun", "you have ^a ^adj ^noun", "^pnoun is ^prverb", "^adv, ^pnoun ^paverb ^pnoun ^adj","^noun is just ^a ^adj ^noun", "i hate it when ^pnoun gets ^paverb", "^noun ^pverb ^adv and ^pnoun ^pverb ^adv","^mnoun ^pverb"]
Mnoun=[Pnoun,Noun]#master noun
class RandomModule():
    """Expands random sentence templates built from the module-level word lists."""

    @staticmethod  # FIX: without this, calling latername() on an instance raised TypeError
    def latername():
        """Return a random sentence with every ^placeholder token resolved.

        Each pass of the loop substitutes at most one occurrence of each
        placeholder; because some replacement words themselves contain
        placeholders (e.g. "^pnoun and ^pnoun"), the loop repeats until no
        placeholder tokens remain, then resolves the "^a" article markers.
        """
        sent = random.choice(Sentens)
        placeholders = ("^paverb", "^prverb", "^pverb", "^adj", "^adv",
                        "^pnoun", "^noun", "^mnoun")
        while True:
            sent = sent.replace("^paverb", random.choice(PaVerb), 1)
            sent = sent.replace("^prverb", random.choice(PrVerb), 1)
            sent = sent.replace("^pverb", random.choice(Pverb), 1)
            sent = sent.replace("^adj", random.choice(Describe), 1)
            sent = sent.replace("^adv", random.choice(Adverb), 1)
            sent = sent.replace("^pnoun", random.choice(Pnoun), 1)
            sent = sent.replace("^noun", random.choice(Noun), 1)
            # ^mnoun draws from one of the two noun lists (Pnoun or Noun), chosen at random.
            sent = sent.replace("^mnoun", random.choice(random.choice(Mnoun)), 1)
            if not any(token in sent for token in placeholders):
                # All word placeholders are resolved; choose "a" vs "an" from the
                # first letter of the word following each "^a " marker.
                while "^a" in sent:
                    pos = sent.find("^a") + 3  # index of the following word's first letter
                    # BUG FIX: slice (instead of sent[pos]) avoids an IndexError
                    # when "^a" appears at the very end of the sentence.
                    first_letter = sent[pos:pos + 1].strip().lower()
                    article = "an" if first_letter in ("a", "e", "i", "o", "u") else "a"
                    sent = sent.replace("^a", article, 1)
                return sent
class Twitter():
    """Thin wrapper around a tweepy client that posts randomly generated sentences."""

    def __init__(self):
        # SECURITY NOTE(review): consumer keys and access tokens are hardcoded
        # here. They should be rotated and loaded from environment variables or
        # a secrets store instead of being committed to source control.
        auth = tweepy.OAuthHandler("YuzvvrQIgvosXVZ6jahFuTMO6", "IZbVE0LWVq9s0YoQkUpE8v35c0AiYSKCZrlaqJtLp6HR04WqOy")
        auth.set_access_token("1419262253065461763-aYVqg6bMSgc75gcWTCcHdYZ3MWCvR3", "CnikegAGLSZojqzfOeKGVA0tNisOwmAE90kudSnVpWdar")
        self.api = tweepy.API(auth)

    def tweet(self, message, media_ids):
        """Post `message` as a status update, attaching any uploaded media ids."""
        self.api.update_status(message, media_ids=media_ids)

    def do_funny(self, image_fp: Optional[str]):
        """Generate a random sentence and tweet it, optionally attaching the image at `image_fp`."""
        msg = RandomModule.latername()
        print(msg)
        # optional attachment: upload the image first so its media id can be referenced
        media_ids = []
        if image_fp is not None:
            media = self.api.media_upload(image_fp)
            media_ids.append(media.media_id)
        self.tweet(msg, media_ids)
if __name__ == "__main__":
    # Manual smoke test: post one random sentence with no image attachment.
    bot = Twitter()
    bot.do_funny(None)
| 105.294118 | 3,644 | 0.668017 | 742 | 7,160 | 6.413747 | 0.695418 | 0.01681 | 0.031519 | 0.023534 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00857 | 0.103631 | 7,160 | 67 | 3,645 | 106.865672 | 0.73216 | 0.00824 | 0 | 0 | 0 | 0 | 0.548364 | 0.024182 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078431 | false | 0 | 0.078431 | 0 | 0.215686 | 0.019608 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
aa05bb3fee6e633e84f1501116779ff867f3a532 | 9,354 | py | Python | src/prodstats/db/models/wells.py | la-mar/prodstats | 4ff5a6e0b0d6152af2d7e1f3844ede2d33ad4824 | [
"MIT"
] | null | null | null | src/prodstats/db/models/wells.py | la-mar/prodstats | 4ff5a6e0b0d6152af2d7e1f3844ede2d33ad4824 | [
"MIT"
] | null | null | null | src/prodstats/db/models/wells.py | la-mar/prodstats | 4ff5a6e0b0d6152af2d7e1f3844ede2d33ad4824 | [
"MIT"
] | 1 | 2021-01-05T18:58:08.000Z | 2021-01-05T18:58:08.000Z | from db.models.bases import Base, db
__all__ = [
"WellHeader",
"WellStat",
"WellDepth",
"WellLink",
"WellLocation",
"IPTest",
"FracParameters",
"Survey",
"SurveyPoint",
]
class WellBase(Base):
    """Shared base for all well-scoped tables; every subclass row is keyed by api14."""

    # 14-character API well identifier: primary key and the join key across all well tables.
    api14 = db.Column(db.String(14), index=True, primary_key=True)
class WellHeader(WellBase):
    """Master header record for a well (one row per api14)."""

    __tablename__ = "wells"

    # identity / status
    api10 = db.Column(db.String(10), index=True)
    well_name = db.Column(db.String())
    hole_direction = db.Column(db.String(1))  # single-letter code (e.g. H/V — TODO confirm values)
    status = db.Column(db.String(50))
    is_producing = db.Column(db.Boolean(), index=True)

    # operator (current and historical, raw plus normalized alias)
    operator = db.Column(db.String(), index=True)
    operator_alias = db.Column(db.String(), index=True)
    hist_operator = db.Column(db.String(), index=True)
    hist_operator_alias = db.Column(db.String(), index=True)

    # depths / lengths — presumably feet; TODO confirm units
    tvd = db.Column(db.Integer())
    md = db.Column(db.Integer())
    perfll = db.Column(db.Integer())
    lateral_length = db.Column(db.Integer())
    ground_elev = db.Column(db.Integer())
    kb_elev = db.Column(db.Integer())

    # lifecycle dates and permitting
    comp_date = db.Column(db.Date())
    spud_date = db.Column(db.Date())
    permit_date = db.Column(db.Date())
    permit_number = db.Column(db.String())
    permit_status = db.Column(db.String())
    rig_release_date = db.Column(db.Date())  # rr_date
    last_activity_date = db.Column(db.Date())

    # geography
    basin = db.Column(db.String(50), index=True)  # basin
    sub_basin = db.Column(db.String(50), index=True)  # basin
    state = db.Column(db.String(50))  # state_name
    state_code = db.Column(db.String(10))  # state_name
    county = db.Column(db.String(50), index=True)  # county_name
    county_code = db.Column(db.String(10))  # county_name

    # upstream data-provider bookkeeping
    provider_status = db.Column(db.String(50))
    provider = db.Column(db.String())
    provider_last_update_at = db.Column(db.DateTime(timezone=True))

    # composite indexes supporting the common basin-scoped filters
    basin_holedir_isprod_idx = db.Index(
        "ix_well_basin_holedir_isprod", "basin", "hole_direction", "is_producing"
    )
    basin_status_idx = db.Index("ix_well_basin_status", "basin", "status")
class FracParameters(WellBase):
    """Frac-job parameters per well (volumes/masses absolute and per lateral foot)."""

    __tablename__ = "frac_parameters"

    # suffixes suggest barrels (bbl) and pounds (lb), absolute and per-foot — TODO confirm units
    fluid_bbl = db.Column(db.Integer())
    proppant_lb = db.Column(db.Integer())
    fluid_bbl_ft = db.Column(db.Integer())
    proppant_lb_ft = db.Column(db.Integer())
    lateral_length = db.Column(db.Integer())
    lateral_length_type = db.Column(db.String(25))  # describes how lateral_length was derived
    gen = db.Column(db.Integer())  # frac "generation" number, paired with gen_name
    gen_name = db.Column(db.String(10))

    # upstream data-provider bookkeeping
    provider = db.Column(db.String())
    provider_last_update_at = db.Column(db.DateTime(timezone=True))
class WellStat(WellBase):
    """Generic per-well statistic stored as a discriminated name/value row.

    ``type`` declares which of the value columns is populated for the row
    ("numeric", "string" or "date").
    """

    __tablename__ = "wellstats"

    name = db.Column(db.String(50), primary_key=True)  # stat name; PK together with api14
    type = db.Column(db.String(25), nullable=False)  # numeric, string, date
    numeric_value = db.Column(db.Numeric(19, 2))
    # BUG FIX: string_value and date_value were copy-pasted as Numeric(19, 2),
    # making it impossible to store the string/date stats that the ``type``
    # discriminator advertises. Use column types matching their names.
    string_value = db.Column(db.String())
    date_value = db.Column(db.Date())
    comments = db.Column(db.JSONB(), nullable=False, server_default="{}")

    # Candidate stats retained from the original for reference:
    # wellbore_crow_length = db.Column(db.Integer()) # wellbore_linear_distance
    # wellbore_direction = db.Column(db.String(1)) # wellbore_direction
    # wellbore_bearing = db.Column(db.Float()) # wellbore_direction_degrees
    # wellbore_dls_roc = db.Column(db.Float())
    # lateral_dls_roc = db.Column(db.Float())
    # wellbore_dls_mc = db.Column(db.Float())
    # lateral_dls_mc = db.Column(db.Float())
    # nearest_prospect = db.Column(db.String(50))
    # dist_to_prospect_mi = db.Column(db.Float())
    # nearest_api10 = db.Column(db.String(50))
    # dist_to_deo_well_mi = db.Column(db.Float())
class WellDepth(WellBase):
    """Named depth measurement for a well, positioned relative to geologic formations."""

    __tablename__ = "depths"

    name = db.Column(db.String(50), index=True, primary_key=True)  # depth name; PK with api14
    value = db.Column(db.Integer())
    property_name = db.Column(db.String(50), index=True)
    aggregate_type = db.Column(db.String(25), index=True)
    grid_id = db.Column(db.Integer(), index=True)  # presumably references a depth grid — TODO confirm
    formation = db.Column(db.String(50), index=True)

    # placement of the depth relative to the named formation and the one below it
    into_formation_feet = db.Column(db.Integer())
    into_formation_percent = db.Column(db.Float())
    above_next_formation_feet = db.Column(db.Integer())
    above_next_formation_percent = db.Column(db.Float())
    overlap_feet = db.Column(db.Integer())
    overlap_percent = db.Column(db.Float())
    in_target = db.Column(db.Boolean())
    assignment_method = db.Column(db.String())  # TODO: enum
class WellLink(WellBase):
    """Named external link (key/value) associated with a well."""

    __tablename__ = "well_links"

    name = db.Column(db.String(50), index=True, primary_key=True)  # link name; PK with api14
    value = db.Column(db.String())
class WellLocation(WellBase):
    """Named surface/bottom-hole location for a well, with legal description and geometry."""

    __tablename__ = "well_locations"

    name = db.Column(db.String(50), index=True, primary_key=True)  # location name; PK with api14

    # legal land description fields
    block = db.Column(db.String(50))
    section = db.Column(db.String(50))
    abstract = db.Column(db.String(50))
    survey = db.Column(db.String(50))
    metes_bounds = db.Column(db.String(50))

    # coordinates; geom is a WGS84 (EPSG:4326) point, indexed explicitly below
    lon = db.Column(db.Float())
    lat = db.Column(db.Float())
    geom = db.Column(db.Geometry("POINT", srid=4326, spatial_index=False))
    ix_well_location_geom = db.Index(
        "ix_well_location_geom", "geom", postgresql_using="gist"
    )
class Survey(WellBase):
    """Directional survey for a well, including derived wellbore geometries."""

    __tablename__ = "surveys"

    survey_type = db.Column(db.String(50))
    survey_method = db.Column(db.String(50))
    survey_date = db.Column(db.Date())
    survey_top = db.Column(db.Integer())
    survey_top_uom = db.Column(db.String(10))  # unit of measure for survey_top
    survey_base = db.Column(db.Integer())
    survey_base_uom = db.Column(db.String(10))  # unit of measure for survey_base

    # NOTE: alembic doesn't play nicely with spatial created by geoalchemy2. It
    # will autogenerate them, then delete them in the next migration. The
    # workaround here is to disable the automatic index creation by
    # geoalchemy2 (spatial_index=False) and explictly add the gist indices
    # using sqlalchemy.
    wellbore = db.Column(db.Geometry("LINESTRING", srid=4326, spatial_index=False))
    lateral_only = db.Column(db.Geometry("LINESTRING", srid=4326, spatial_index=False))
    stick = db.Column(db.Geometry("LINESTRING", srid=4326, spatial_index=False))
    bent_stick = db.Column(db.Geometry("LINESTRING", srid=4326, spatial_index=False))

    # explicit GiST indices for each geometry column (see NOTE above)
    ix_survey_wellbore = db.Index(
        "ix_survey_wellbore", "wellbore", postgresql_using="gist"
    )
    ix_survey_lateral_only = db.Index(
        "ix_survey_lateral_only", "lateral_only", postgresql_using="gist"
    )
    ix_survey_stick = db.Index("ix_survey_stick", "stick", postgresql_using="gist")
    ix_survey_bent_stick = db.Index(
        "ix_survey_bent_stick", "bent_stick", postgresql_using="gist"
    )
class SurveyPoint(WellBase):
    """Single station of a well's directional survey, keyed by (api14, md)."""

    __tablename__ = "survey_points"

    md = db.Column(db.Integer(), primary_key=True)  # measured depth; PK with api14
    tvd = db.Column(db.Integer())
    dip = db.Column(db.Float())
    sequence = db.Column(db.Integer())
    theta = db.Column(db.Float())

    # classification flags assigned by downstream processing
    is_in_lateral = db.Column(db.Boolean(), nullable=False, default=False)
    is_heel_point = db.Column(db.Boolean(), nullable=False, default=False)
    is_mid_point = db.Column(db.Boolean(), nullable=False, default=False)
    is_toe_point = db.Column(db.Boolean(), nullable=False, default=False)
    is_soft_corner = db.Column(db.Boolean(), nullable=False, default=False)
    is_hard_corner = db.Column(db.Boolean(), nullable=False, default=False)
    is_kop = db.Column(db.Boolean(), nullable=False, default=False)

    geom = db.Column(db.Geometry("POINT", srid=4326, spatial_index=False))

    # partial indexes: only rows where the corresponding flag is true are indexed
    ix_lateral_partial = db.Index(
        "ix_lateral_partial",
        "api14",
        "is_in_lateral",
        postgresql_where=(is_in_lateral),
    )
    ix_heel_partial = db.Index(
        "ix_heel_partial", "api14", "is_heel_point", postgresql_where=(is_heel_point),
    )
    ix_mid_partial = db.Index(
        "ix_mid_partial", "api14", "is_mid_point", postgresql_where=(is_mid_point),
    )
    ix_toe_partial = db.Index(
        "ix_toe_partial", "api14", "is_toe_point", postgresql_where=(is_toe_point),
    )
    ix_survey_point_geom = db.Index(
        "ix_survey_point_geom", "geom", postgresql_using="gist"
    )
class IPTest(WellBase):
    """Initial-potential test result for a well, keyed by (api14, test_number).

    Most measured quantities carry a companion ``*_uom`` column holding the
    unit of measure reported by the provider.
    """

    __tablename__ = "ip_tests"

    test_number = db.Column(db.Integer(), primary_key=True)  # PK with api14
    test_date = db.Column(db.Date(), index=True)
    type_code = db.Column(db.String(10))
    test_method = db.Column(db.String())
    completion = db.Column(db.Integer())

    # measured rates, absolute and per-10k (presumably per 10,000 ft of lateral — TODO confirm)
    oil = db.Column(db.Integer())
    oil_per10k = db.Column(db.Integer())
    oil_uom = db.Column(db.String(10))
    gas = db.Column(db.Integer())
    gas_per10k = db.Column(db.Integer())
    gas_uom = db.Column(db.String(10))
    water = db.Column(db.Integer())
    water_per10k = db.Column(db.Integer())
    water_uom = db.Column(db.String(10))

    choke = db.Column(db.String(25))
    depth_top = db.Column(db.Integer())
    depth_top_uom = db.Column(db.String(10))
    depth_base = db.Column(db.Integer())
    depth_base_uom = db.Column(db.String(10))
    sulfur = db.Column(db.Boolean())
    oil_gravity = db.Column(db.Float())
    oil_gravity_uom = db.Column(db.String(10))
    gor = db.Column(db.Integer())  # gas-oil ratio
    gor_uom = db.Column(db.String(10))
    perf_upper = db.Column(db.Integer())
    perf_upper_uom = db.Column(db.String(10))
    perf_lower = db.Column(db.Integer())
    perf_lower_uom = db.Column(db.String(10))
    perfll = db.Column(db.Integer())
    perfll_uom = db.Column(db.String(10))
| 38.652893 | 87 | 0.678426 | 1,321 | 9,354 | 4.582892 | 0.161998 | 0.182359 | 0.227948 | 0.150644 | 0.609184 | 0.436901 | 0.282788 | 0.213413 | 0.182028 | 0.162537 | 0 | 0.018466 | 0.172119 | 9,354 | 241 | 88 | 38.813278 | 0.763301 | 0.102202 | 0 | 0.07772 | 0 | 0 | 0.074919 | 0.008484 | 0 | 0 | 0 | 0.004149 | 0 | 1 | 0 | false | 0 | 0.005181 | 0 | 0.823834 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
aa1d58cd6e3011951a349e75d29864f22c42733d | 849 | py | Python | alipay/aop/api/response/MybankCreditLoantradeLoanrelationQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/response/MybankCreditLoantradeLoanrelationQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/response/MybankCreditLoantradeLoanrelationQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class MybankCreditLoantradeLoanrelationQueryResponse(AlipayResponse):
    """Response model exposing the loan_relation_flag field returned by the gateway."""

    def __init__(self):
        super(MybankCreditLoantradeLoanrelationQueryResponse, self).__init__()
        self._loan_relation_flag = None

    @property
    def loan_relation_flag(self):
        # Flag value parsed from the gateway response (None until parsed).
        return self._loan_relation_flag

    @loan_relation_flag.setter
    def loan_relation_flag(self, value):
        self._loan_relation_flag = value

    def parse_response_content(self, response_content):
        parsed = super(MybankCreditLoantradeLoanrelationQueryResponse, self).parse_response_content(response_content)
        # Adopt the flag only when the gateway actually included the key.
        try:
            self.loan_relation_flag = parsed['loan_relation_flag']
        except KeyError:
            pass
| 32.653846 | 119 | 0.757362 | 90 | 849 | 6.755556 | 0.388889 | 0.177632 | 0.236842 | 0.131579 | 0.075658 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001412 | 0.166078 | 849 | 25 | 120 | 33.96 | 0.857345 | 0.04947 | 0 | 0 | 0 | 0 | 0.044721 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.125 | 0.0625 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
aa204ed074af8386c256aec5bd333f132c0d9035 | 1,242 | py | Python | examples/create_list.py | emillion/zimsoap | d1ea2eb4d50f263c9a16e5549af03f1eff3e295e | [
"Apache-2.0"
] | 11 | 2015-02-15T23:52:56.000Z | 2021-05-06T18:05:24.000Z | examples/create_list.py | emillion/zimsoap | d1ea2eb4d50f263c9a16e5549af03f1eff3e295e | [
"Apache-2.0"
] | 25 | 2015-01-14T11:27:51.000Z | 2016-09-07T14:06:52.000Z | examples/create_list.py | emillion/zimsoap | d1ea2eb4d50f263c9a16e5549af03f1eff3e295e | [
"Apache-2.0"
] | 10 | 2015-08-12T14:45:17.000Z | 2021-12-08T23:40:35.000Z | import zimsoap.client
import zimsoap.utils
from zimsoap.zobjects import DistributionList
# Exercise distribution-list CRUD operations against a Zimbra admin service.
LIST_NAME = "testliste@zimbralab.example.org"

zc = zimsoap.client.ZimbraAdminClient("xxxxxxx", "7071")
zc.login("zimbra", "xxxxxxxxxx")

# Look up the list, then delete it if it already exists (first run prints the error).
a = zc.get_distribution_list(DistributionList(name=LIST_NAME))
try:
    liste = zc.delete_distribution_list(DistributionList(name=LIST_NAME))
except Exception as e:
    print(e)

# Recreate the list and populate it.
liste = zc.create_distribution_list(LIST_NAME)
zc.add_distribution_list_member(
    DistributionList(name=LIST_NAME),
    ["test1@example.com", "test2@example.com", "test3@example.com"])
a = zc.get_distribution_list(DistributionList(name=LIST_NAME))

# Remove one member, then add a second batch.
zc.remove_distribution_list_member(
    DistributionList(name=LIST_NAME),
    ["test2@example.com"])
b = zc.get_distribution_list(DistributionList(name=LIST_NAME))
zc.add_distribution_list_member(
    DistributionList(name=LIST_NAME),
    ["test21@example.com", "test22@example.com", "test23@example.com"])
c = zc.get_distribution_list(DistributionList(name=LIST_NAME))
print(c.members)
| 37.636364 | 71 | 0.768921 | 149 | 1,242 | 6.268456 | 0.302013 | 0.154176 | 0.240899 | 0.269807 | 0.633833 | 0.633833 | 0.633833 | 0.633833 | 0.56531 | 0.490364 | 0 | 0.012433 | 0.093398 | 1,242 | 32 | 72 | 38.8125 | 0.817052 | 0 | 0 | 0.413793 | 0 | 0 | 0.344605 | 0.224638 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.103448 | 0 | 0.103448 | 0.068966 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
aa2688c8ab1bc14cdb9467f00f1cacb7cbd37fa4 | 258 | py | Python | decorator1.py | darkless456/Python | 1ba37d028e4a818ccfffc18682c1bac15554e3ac | [
"MIT"
] | null | null | null | decorator1.py | darkless456/Python | 1ba37d028e4a818ccfffc18682c1bac15554e3ac | [
"MIT"
] | null | null | null | decorator1.py | darkless456/Python | 1ba37d028e4a818ccfffc18682c1bac15554e3ac | [
"MIT"
] | null | null | null | # decorator.py
def deco(func):
    """Identity decorator that prints 'ok' once at decoration time."""
    print('ok')
    return func


@deco
def foo():
    print('foo')


foo()

# --------- equivalent to ---------
print("-" * 10, '等价于', '-' * 10)


def deco1(func):
    """Same decorator, applied below by an explicit call instead of @ syntax."""
    print('ok')
    return func


def foo():
    print('foo')


deco1(foo)
foo()
| 11.727273 | 30 | 0.5 | 34 | 258 | 3.794118 | 0.352941 | 0.139535 | 0.170543 | 0.263566 | 0.325581 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029851 | 0.22093 | 258 | 21 | 31 | 12.285714 | 0.61194 | 0.139535 | 0 | 0.666667 | 0 | 0 | 0.068493 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.266667 | false | 0 | 0 | 0 | 0.4 | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
aa3443cedb73970973c2cd1e82c87330fa545a8d | 4,462 | py | Python | tests/test_int32.py | odonnellnoel/litcoin | cebe745df97d060c16b8d9dfa9e58a0418f75560 | [
"MIT"
] | null | null | null | tests/test_int32.py | odonnellnoel/litcoin | cebe745df97d060c16b8d9dfa9e58a0418f75560 | [
"MIT"
] | null | null | null | tests/test_int32.py | odonnellnoel/litcoin | cebe745df97d060c16b8d9dfa9e58a0418f75560 | [
"MIT"
] | null | null | null | from litcoin.binhex import b, x
from litcoin.int32 import INT32_SIZE_IN_BYTES, validate_int32, serialize_int32, deserialize_int32
import unittest
class TestInt32(unittest.TestCase):
    """Tests for int32 validation, serialization and deserialization.

    Uses unittest assertion methods (instead of bare ``assert``) so the
    checks survive ``python -O`` and produce informative failure messages.
    """

    def test_INT32_SIZE_IN_BYTES(self):
        # A serialized int32 always occupies exactly 4 bytes.
        self.assertEqual(4, INT32_SIZE_IN_BYTES)

    def test_validate_int32(self):
        # Values across the full signed 32-bit range validate without raising.
        validate_int32(0)
        validate_int32(0x7fffffff)
        validate_int32(-0x80000000)

        with self.assertRaises(AssertionError,
                               msg="should be raised because positive `n` argument is one too high"):
            validate_int32(0x80000000)
        with self.assertRaises(AssertionError,
                               msg="should be raised because negative `n` argument is one too low"):
            validate_int32(-0x80000001)
        with self.assertRaises(AssertionError, msg="should be raised because `n` argument is float"):
            validate_int32(0.0)
        with self.assertRaises(AssertionError, msg="should be raised because `n` argument is the wrong type"):
            validate_int32("wrong type")
        with self.assertRaises(TypeError, msg="should be raised because all arguments are missing"):
            validate_int32()

    def test_serialize_int32(self):
        # Little-endian two's-complement encodings, including both extremes.
        self.assertEqual(b("00000000"), serialize_int32(0))
        self.assertEqual(b("01000000"), serialize_int32(1))
        self.assertEqual(b("ffffffff"), serialize_int32(-1))
        self.assertEqual(b("ffffff7f"), serialize_int32(2147483647))
        self.assertEqual(b("00000080"), serialize_int32(-2147483648))

        with self.assertRaises(AssertionError,
                               msg="should be raised because positive `n` argument is one too high"):
            serialize_int32(0x80000000)
        with self.assertRaises(AssertionError,
                               msg="should be raised because negative `n` argument is one too low"):
            serialize_int32(-0x80000001)
        with self.assertRaises(AssertionError, msg="should be raised because `n` argument is of the wrong type"):
            serialize_int32("wrong type")
        with self.assertRaises(TypeError, msg="should be raised because all arguments are missing"):
            serialize_int32()

    def test_deserialize_int32(self):
        # Each call returns a (value, next_offset) tuple; `i` is the starting offset.
        self.assertEqual((0, 4), deserialize_int32(b("00000000")))
        self.assertEqual((1, 4), deserialize_int32(b("01000000")))
        self.assertEqual((-1, 4), deserialize_int32(b("ffffffff")))
        self.assertEqual((-2, 4), deserialize_int32(b("feffffff")))
        self.assertEqual((0, 4), deserialize_int32(b("00000000"), 0))
        self.assertEqual((1, 4), deserialize_int32(b("01000000"), 0))
        self.assertEqual((-1, 4), deserialize_int32(b("ffffffff"), 0))
        self.assertEqual((-2, 4), deserialize_int32(b("feffffff"), 0))
        # Trailing bytes beyond the int32 are ignored.
        self.assertEqual((0, 4), deserialize_int32(b("00000000cc"), 0))
        self.assertEqual((1, 4), deserialize_int32(b("01000000cc"), 0))
        self.assertEqual((-1, 4), deserialize_int32(b("ffffffffcc"), 0))
        self.assertEqual((-2, 4), deserialize_int32(b("feffffffcc"), 0))
        # A non-zero offset skips leading bytes.
        self.assertEqual((0, 5), deserialize_int32(b("cc00000000"), 1))
        self.assertEqual((1, 5), deserialize_int32(b("cc01000000"), 1))
        self.assertEqual((-1, 5), deserialize_int32(b("ccffffffff"), 1))
        self.assertEqual((-2, 5), deserialize_int32(b("ccfeffffff"), 1))
        self.assertEqual((0, 5), deserialize_int32(b("cc00000000cc"), 1))
        self.assertEqual((1, 5), deserialize_int32(b("cc01000000cc"), 1))
        self.assertEqual((-1, 5), deserialize_int32(b("ccffffffffcc"), 1))
        self.assertEqual((-2, 5), deserialize_int32(b("ccfeffffffcc"), 1))

        with self.assertRaises(AssertionError, msg="should be raised because `data` argument is 3 bytes long"):
            deserialize_int32(b("000000"))
        with self.assertRaises(AssertionError,
                               msg="should be raised because `i` argument is 0 when it should be 1 thus a different "
                                   "value is deserialized"):
            # assertEqual raises the expected AssertionError because the result
            # is a (value, offset) tuple, never the bare integer 0.
            self.assertEqual(0, deserialize_int32(b("cc00000000"), 0))
        with self.assertRaises(AssertionError,
                               msg="should be raised because `i` argument is 2 when it should be 1 thus there\"s an "
                                   "overflow"):
            deserialize_int32(b("cc00000000"), 2)
        with self.assertRaises(AssertionError, msg="should be raised because `data` argument is of the wrong type"):
            deserialize_int32("wrong type")
        with self.assertRaises(TypeError, msg="should be raised because all arguments are missing"):
            deserialize_int32()
| 51.883721 | 117 | 0.623263 | 524 | 4,462 | 5.19084 | 0.175573 | 0.158824 | 0.14375 | 0.177574 | 0.707721 | 0.666912 | 0.619118 | 0.440074 | 0.440074 | 0.440074 | 0 | 0.112663 | 0.26199 | 4,462 | 85 | 118 | 52.494118 | 0.713331 | 0 | 0 | 0.178082 | 0 | 0 | 0.256163 | 0 | 0 | 0 | 0.013447 | 0 | 0.561644 | 1 | 0.054795 | false | 0 | 0.041096 | 0 | 0.109589 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.