hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
268dcfcbfe53d8ec28133e7e8904518da9a4d49f | 409 | py | Python | plugins/dashboard/widgets/uptime.py | ajenti/ajen | 177c1a67278a7763ed06eb2f773d7b409a85ec77 | [
"MIT"
] | 3,777 | 2015-02-21T00:10:12.000Z | 2022-03-30T15:33:22.000Z | plugins/dashboard/widgets/uptime.py | ajenti/ajen | 177c1a67278a7763ed06eb2f773d7b409a85ec77 | [
"MIT"
] | 749 | 2015-03-12T14:17:03.000Z | 2022-03-25T13:22:28.000Z | plugins/dashboard/widgets/uptime.py | ajenti/ajen | 177c1a67278a7763ed06eb2f773d7b409a85ec77 | [
"MIT"
] | 687 | 2015-03-21T10:42:33.000Z | 2022-03-21T23:18:12.000Z | import psutil
import time
from jadi import component
from aj.plugins.dashboard.api import Widget
@component(Widget)
class UptimeWidget(Widget):
id = 'uptime'
name = _('Uptime')
template = '/dashboard:resources/partial/widgets/uptime.html'
def __init__(self, context):
Widget.__init__(self, context)
def get_value(self, config):
return time.time() - psutil.boot_time()
| 21.526316 | 65 | 0.701711 |
4fcea72a447ec14238b0be6de48a6d11dcb3ae59 | 3,209 | py | Python | lark/parsers/lalr_puppet.py | julienmalard/lark | a768506945a56ed1503cba230fb79d7beaa4dacf | [
"MIT"
] | null | null | null | lark/parsers/lalr_puppet.py | julienmalard/lark | a768506945a56ed1503cba230fb79d7beaa4dacf | [
"MIT"
] | null | null | null | lark/parsers/lalr_puppet.py | julienmalard/lark | a768506945a56ed1503cba230fb79d7beaa4dacf | [
"MIT"
] | null | null | null | # This module provide a LALR puppet, which is used to debugging and error handling
from copy import deepcopy
from .lalr_analysis import Shift, Reduce
from .. import Token
class ParserPuppet(object):
def __init__(self, parser, state_stack, value_stack, start, stream, set_state):
self.parser = parser
self._state_stack = state_stack
self._value_stack = value_stack
self._start = start
self._stream = stream
self._set_state = set_state
self.result = None
def feed_token(self, token):
"""Advance the parser state, as if it just received `token` from the lexer
"""
end_state = self.parser.parse_table.end_states[self._start]
state_stack = self._state_stack
value_stack = self._value_stack
state = state_stack[-1]
action, arg = self.parser.parse_table.states[state][token.type]
assert arg != end_state
while action is Reduce:
rule = arg
size = len(rule.expansion)
if size:
s = value_stack[-size:]
del state_stack[-size:]
del value_stack[-size:]
else:
s = []
value = self.parser.callbacks[rule](s)
_action, new_state = self.parser.parse_table.states[state_stack[-1]][rule.origin.name]
assert _action is Shift
state_stack.append(new_state)
value_stack.append(value)
if state_stack[-1] == end_state:
self.result = value_stack[-1]
return self.result
state = state_stack[-1]
action, arg = self.parser.parse_table.states[state][token.type]
assert arg != end_state
assert action is Shift
state_stack.append(arg)
value_stack.append(token)
def copy(self):
return type(self)(
self.parser,
list(self._state_stack),
deepcopy(self._value_stack),
self._start,
self._stream,
self._set_state,
)
def __eq__(self, other):
if not isinstance(other, ParserPuppet):
return False
return (
self._state_stack == other._state_stack and
self._value_stack == other._value_stack and
self._stream == other._stream and
self._start == other._start
)
def pretty(self):
out = ["Puppet choices:"]
for k, v in self.choices().items():
out.append('\t- %s -> %s' % (k, v))
out.append('stack size: %s' % len(self._state_stack))
return '\n'.join(out)
def choices(self):
return self.parser.parse_table.states[self._state_stack[-1]]
def accepts(self):
accepts = set()
for t in self.choices():
new_puppet = self.copy()
try:
new_puppet.feed_token(Token(t, ''))
except KeyError:
pass
else:
accepts.add(t)
return accepts
def resume_parse(self):
return self.parser.parse(self._stream, self._start, self._set_state, self._value_stack, self._state_stack)
| 30.273585 | 114 | 0.571518 |
aab89020ea8022bc6136476bddb62d2686374c50 | 527 | py | Python | testgear/HPAK/B2902A.py | PhilippCo/testgear | 9dbf9c6e1028394375ccca6720d5be1854315f93 | [
"MIT"
] | 1 | 2021-08-25T15:16:57.000Z | 2021-08-25T15:16:57.000Z | testgear/HPAK/B2902A.py | PhilippCo/testgear | 9dbf9c6e1028394375ccca6720d5be1854315f93 | [
"MIT"
] | null | null | null | testgear/HPAK/B2902A.py | PhilippCo/testgear | 9dbf9c6e1028394375ccca6720d5be1854315f93 | [
"MIT"
] | null | null | null | """Agilent B2902A Precision Source/Measure Unit"""
import testgear.base_classes as base
import numpy as np
class B2902A(base.source):
def init(self):
self.set_timeout(10)
self.idstr = self.query("*IDN?").strip()
def set_output(self, voltage=None, current=None, enabled=True, frequency=0, resistance=None, fourWire=False, channel=1):
pass
def get_output(self):
"""return an object which reflects the output conditions"""
obj = base.output_status()
return obj | 27.736842 | 124 | 0.667932 |
3519467e6227b676c514ed7c97cc8c3251178ad7 | 569 | py | Python | src/meta_bilstm/nn_utils/losses.py | brongulus/MetaBiLSTM | ca70c96fa375cb7e4ca5a4199395e6fe00ce6ed3 | [
"MIT"
] | null | null | null | src/meta_bilstm/nn_utils/losses.py | brongulus/MetaBiLSTM | ca70c96fa375cb7e4ca5a4199395e6fe00ce6ed3 | [
"MIT"
] | null | null | null | src/meta_bilstm/nn_utils/losses.py | brongulus/MetaBiLSTM | ca70c96fa375cb7e4ca5a4199395e6fe00ce6ed3 | [
"MIT"
] | null | null | null | import torch.nn.functional as F
def seq_loss(inp, lens, labels):
"""Calculate the cross entropy loss"""
total_loss = 0
for i in range(len(inp)):
total_loss += F.cross_entropy(
inp[i, : lens[i], :], labels[i][: lens[i]], reduction="sum"
)
return total_loss / sum(lens)
def calc_accuracy(inp_inds, output, true_labels):
"""Calculates the accuracy"""
mask = inp_inds != 0
total_preds = mask.sum()
correct = ((output.argmax(dim=2) == true_labels) * mask).sum()
return correct.float() / total_preds.item()
| 28.45 | 71 | 0.620387 |
ee0a363d9df28a3a1f772b5d50155674ba34485c | 92 | py | Python | backend/gifz_api/gifs/apps.py | mkusiciel/terraform-workshops | 52fd795cbd4e2b180dc7729cefe69cde40a50abf | [
"MIT"
] | 3 | 2018-04-14T08:30:56.000Z | 2018-06-30T06:56:00.000Z | backend/gifz_api/gifs/apps.py | 0mk1/terraform-workshops | 689ed933bbffa90dd49cd2bc5617a971cb2c5e37 | [
"MIT"
] | 1 | 2018-06-30T07:08:09.000Z | 2018-06-30T07:08:09.000Z | backend/gifz_api/gifs/apps.py | 0mk1/terraform-workshops | 689ed933bbffa90dd49cd2bc5617a971cb2c5e37 | [
"MIT"
] | 3 | 2018-04-14T10:41:59.000Z | 2019-11-10T21:27:43.000Z | from django.apps import AppConfig
class GifsConfig(AppConfig):
name = 'gifz_api.gifs'
| 15.333333 | 33 | 0.75 |
fcee9b6c51f2cf8eb34ed440803d5a73c7fdb63c | 5,629 | py | Python | dae_ecgsim.py | lingfliu/sfnn | e747bdcf964ec2395165cebb547981e73761457b | [
"Apache-2.0"
] | 1 | 2019-11-13T05:34:32.000Z | 2019-11-13T05:34:32.000Z | dae_ecgsim.py | lingfliu/sfnn | e747bdcf964ec2395165cebb547981e73761457b | [
"Apache-2.0"
] | null | null | null | dae_ecgsim.py | lingfliu/sfnn | e747bdcf964ec2395165cebb547981e73761457b | [
"Apache-2.0"
] | null | null | null | from keras.layers import ConvLSTM2D, Dense, Conv1D, TimeDistributed, BatchNormalization, MaxPooling2D, MaxPooling1D
from keras.layers import Bidirectional, CuDNNLSTM, Dropout, LSTM, Add, Conv2D, Multiply
from keras.layers import Reshape, Input, Flatten, BatchNormalization
from keras.models import Model
import keras
'''lib loading error prevention'''
import os
os.environ['KMP_DUPLICATE_LIB_OK']='True'
'''========================'''
'''tensorflow configuration'''
'''========================'''
import tensorflow as tf
from keras import backend as K
num_cores = 48
num_CPU = 1
num_GPU = 1
config = tf.ConfigProto(intra_op_parallelism_threads=num_cores,\
inter_op_parallelism_threads=num_cores, allow_soft_placement=True,\
device_count = {'CPU' : num_CPU, 'GPU' : num_GPU})
session = tf.Session(config=config)
K.set_session(session)
'''scientific packages'''
import numpy as np
import pickle
import datetime
'''load data'''
(sigs, sigs_noisy, idx, sps, ecg_N, base_snr, artifact_snr) = pickle.load(open('dae_ecgsim_stepnoise_500.dat', 'rb'))
'''global parameters'''
sample_len = len(sigs)
input_dim = 1
output_dim = 1
if input_dim < output_dim:
print('input_dim smaller than output_dim, quit task')
stride = output_dim
timestep = 0
# neural params
batch_size = 40
epochs = 240
filter_size = 80
kernel_size = 4
dropout = 0.2
# stagging the signal
x_train = []
for sig in sigs_noisy:
seq_noisy = np.array([sig[i*stride:i*stride+input_dim] for i in range((len(sig)-input_dim)//stride)])
x_train.append(seq_noisy)
y_train = []
for sig in sigs:
y = np.array([sig[i*stride+input_dim//2-output_dim//2:i*stride+input_dim//2 - output_dim//2 + output_dim] for i in range( (len(sig)-input_dim)//stride )])
y_train.append(y)
# update the timestep
timestep = len(x_train[0])
x_train = np.array(x_train)
y_train = np.array(y_train)
'''build neural'''
input = Input(shape=(timestep, input_dim))
dae = input
'''ConvNN before putting into LSTM'''
if input_dim > kernel_size:
dae = Reshape(target_shape=(timestep, input_dim, 1))(dae)
dae = TimeDistributed(Conv1D(16, kernel_size=kernel_size, data_format='channels_last', activation='relu'))(dae)
dae = TimeDistributed(Conv1D(32, kernel_size=kernel_size, data_format='channels_last', activation='relu'))(dae)
dae = TimeDistributed(Flatten(data_format='channels_last'))(dae)
'''residual LSTM'''
# layer_input = []
# layer_output = []
# for i in range(3):
# if len(layer_output) <= 0:
# ii = dae
# elif len(layer_output) == 1:
# ii = layer_output[0]
# else:
# ii = Add()(layer_output[:i])
#
# layer_input.append(ii)
# oo = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(ii)
# layer_output.append(oo)
#
# dae = layer_output[-1]
'''manually partially connected residual LSTM'''
# i1 = dae
# o1 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(i1)
# i2 = o1
# o2 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(i2)
# i3 = Add()([o1, o2])
# o3 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(i3)
# i4 = o3
# o4 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(i4)
#
# dae = Add()([o3, o4])
'''LSTM'''
# o1 = (CuDNNLSTM(filter_size, return_sequences=True))(dae)
# o2 = (CuDNNLSTM(filter_size, return_sequences=True))(o1)
'''bidirectional LSTM'''
o1 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(dae)
o2 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(o1)
o2 = Add()([o1, o2])
o2 = Dropout(0.2)(o2)
o2 = BatchNormalization()(o2)
o3 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(o2)
o3 = Add()([o1, o2, o3])
o3 = Dropout(0.2)(o3)
o3 = BatchNormalization()(o3)
# '''attention model'''
# o3a = TimeDistributed(Dense(filter_size*2, activation='softmax'))(o3)
# o3v = Multiply()([o3a, o1])
o4 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(o3)
o4 = Add()([o3, o4])
o4 = Dropout(0.2)(o4)
o4 = BatchNormalization()(o4)
o5 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(o4)
o5 = Add()([o3, o4, o5])
o5 = Dropout(0.2)(o5)
o5 = BatchNormalization()(o5)
o6 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(o5)
'''attention model'''
o6 = TimeDistributed(Dense(filter_size*2, activation='relu'))(o6)
o6 = TimeDistributed(Dense(filter_size*2, activation='softmax'))(o6)
o6v = Multiply()([o6, o5])
dae = o6v
#
# o3 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(Add()([o1, o2]))
# o4 = Bidirectional(CuDNNLSTM(filter_size, return_sequences=True))(o3)
''' fully connected DAE'''
# dae = Add()([o1, o2])
# dae = Add()([o3, o4])
if input_dim > kernel_size:
dae = TimeDistributed(Dense(160, activation='linear'))(dae)
dae = TimeDistributed(Dense(filter_size*2, activation='relu'))(dae)
dae = TimeDistributed(Dense(1, activation='linear'))(dae)
model = Model(input, dae)
print(model.summary())
model.compile(optimizer=keras.optimizers.RMSprop(lr=0.001, rho=0.9, epsilon=None, decay=0.0), metrics=['mae'], loss='logcosh')
hist = model.fit(x_train[:300], y_train[:300], validation_data=(x_train[300:400], y_train[300:400]), batch_size=batch_size, epochs=epochs, verbose=1)
predicted = model.predict(x_train[400:])
expected = y_train[400:]
# '''save the result'''
date_str = datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')
model.save('dae_ecgsim' + date_str + '.h5')
hist_name = 'dae_ecgsim_hist_' + date_str +'.dat'
pickle.dump(hist, open(hist_name, 'wb'))
import matplotlib.pyplot as plt
plot_idx = 30
plt.plot(predicted[plot_idx])
plt.plot(expected[plot_idx])
plt.plot(x_train[plot_idx])
| 29.471204 | 158 | 0.70652 |
f6c7997c4df60254984c0423624d2489218edb15 | 10,664 | py | Python | tests/common/custom_cluster_test_suite.py | radford-nguyen/impala | eb95c912cb7e720caf66388942b4e94d95e95658 | [
"Apache-2.0"
] | null | null | null | tests/common/custom_cluster_test_suite.py | radford-nguyen/impala | eb95c912cb7e720caf66388942b4e94d95e95658 | [
"Apache-2.0"
] | null | null | null | tests/common/custom_cluster_test_suite.py | radford-nguyen/impala | eb95c912cb7e720caf66388942b4e94d95e95658 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Superclass for all tests that need a custom cluster.
# TODO: Configure cluster size and other parameters.
import logging
import os
import os.path
import pipes
import pytest
import re
import subprocess
from subprocess import check_call
from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.impala_cluster import ImpalaCluster
from tests.util.filesystem_utils import IS_LOCAL
from time import sleep
IMPALA_HOME = os.environ['IMPALA_HOME']
DEFAULT_CLUSTER_SIZE = 3
NUM_COORDINATORS = DEFAULT_CLUSTER_SIZE
# Additional args passed to respective daemon command line.
IMPALAD_ARGS = 'impalad_args'
STATESTORED_ARGS = 'state_store_args'
CATALOGD_ARGS = 'catalogd_args'
# Additional args passed to the start-impala-cluster script.
START_ARGS = 'start_args'
SENTRY_CONFIG = 'sentry_config'
SENTRY_LOG_DIR = 'sentry_log_dir'
CLUSTER_SIZE = "cluster_size"
# Default query options passed to the impala daemon command line. Handled separately from
# other impala daemon arguments to allow merging multiple defaults into a single list.
DEFAULT_QUERY_OPTIONS = 'default_query_options'
IMPALA_LOG_DIR = 'impala_log_dir'
# Run with fast topic updates by default to reduce time to first query running.
DEFAULT_STATESTORE_ARGS = '--statestore_update_frequency_ms=50 \
--statestore_priority_update_frequency_ms=50 \
--statestore_heartbeat_frequency_ms=50'
class CustomClusterTestSuite(ImpalaTestSuite):
"""Every test in a test suite deriving from this class gets its own Impala cluster.
Custom arguments may be passed to the cluster by using the @with_args decorator."""
@classmethod
def get_workload(cls):
return 'tpch'
@classmethod
def add_test_dimensions(cls):
super(CustomClusterTestSuite, cls).add_test_dimensions()
cls.add_custom_cluster_constraints()
@classmethod
def add_custom_cluster_constraints(cls):
# Defines constraints for custom cluster tests, called by add_test_dimensions.
# By default, custom cluster tests only run on text/none and with a limited set of
# exec options. Subclasses may override this to relax these default constraints.
super(CustomClusterTestSuite, cls).add_test_dimensions()
cls.ImpalaTestMatrix.add_constraint(lambda v:
v.get_value('table_format').file_format == 'text' and
v.get_value('table_format').compression_codec == 'none')
cls.ImpalaTestMatrix.add_constraint(lambda v:
v.get_value('exec_option')['batch_size'] == 0 and
v.get_value('exec_option')['disable_codegen'] == False and
v.get_value('exec_option')['num_nodes'] == 0)
@classmethod
def setup_class(cls):
# Explicit override of ImpalaTestSuite.setup_class(). For custom cluster, the
# ImpalaTestSuite.setup_class() procedure needs to happen on a per-method basis.
# IMPALA-3614: @SkipIfLocal.multiple_impalad workaround
# IMPALA-2943 TODO: When pytest is upgraded, see if this explicit skip can be
# removed in favor of the class-level SkipifLocal.multiple_impalad decorator.
if IS_LOCAL:
pytest.skip("multiple impalads needed")
@classmethod
def teardown_class(cls):
# Explicit override of ImpalaTestSuite.teardown_class(). For custom cluster, the
# ImpalaTestSuite.teardown_class() procedure needs to happen on a per-method basis.
pass
@staticmethod
def with_args(impalad_args=None, statestored_args=None, catalogd_args=None,
start_args=None, sentry_config=None, default_query_options=None,
impala_log_dir=None, sentry_log_dir=None, cluster_size=None):
"""Records arguments to be passed to a cluster by adding them to the decorated
method's func_dict"""
def decorate(func):
if impalad_args is not None:
func.func_dict[IMPALAD_ARGS] = impalad_args
if statestored_args is None:
func.func_dict[STATESTORED_ARGS] = DEFAULT_STATESTORE_ARGS
else:
func.func_dict[STATESTORED_ARGS] = \
DEFAULT_STATESTORE_ARGS + " " + statestored_args
if catalogd_args is not None:
func.func_dict[CATALOGD_ARGS] = catalogd_args
if start_args is not None:
func.func_dict[START_ARGS] = start_args
if sentry_config is not None:
func.func_dict[SENTRY_CONFIG] = sentry_config
if sentry_log_dir is not None:
func.func_dict[SENTRY_LOG_DIR] = sentry_log_dir
if default_query_options is not None:
func.func_dict[DEFAULT_QUERY_OPTIONS] = default_query_options
if impala_log_dir is not None:
func.func_dict[IMPALA_LOG_DIR] = impala_log_dir
if cluster_size is not None:
func.func_dict[CLUSTER_SIZE] = cluster_size
return func
return decorate
def setup_method(self, method):
cluster_args = list()
for arg in [IMPALAD_ARGS, STATESTORED_ARGS, CATALOGD_ARGS]:
if arg in method.func_dict:
cluster_args.append("--%s=%s " % (arg, method.func_dict[arg]))
if START_ARGS in method.func_dict:
cluster_args.append(method.func_dict[START_ARGS])
if SENTRY_CONFIG in method.func_dict:
self._start_sentry_service(method.func_dict[SENTRY_CONFIG],
method.func_dict.get(SENTRY_LOG_DIR))
cluster_size = DEFAULT_CLUSTER_SIZE
if CLUSTER_SIZE in method.func_dict:
cluster_size = method.func_dict[CLUSTER_SIZE]
# Start a clean new cluster before each test
if IMPALA_LOG_DIR in method.func_dict:
self._start_impala_cluster(cluster_args,
default_query_options=method.func_dict.get(DEFAULT_QUERY_OPTIONS),
impala_log_dir=method.func_dict[IMPALA_LOG_DIR], cluster_size=cluster_size)
else:
self._start_impala_cluster(cluster_args,
default_query_options=method.func_dict.get(DEFAULT_QUERY_OPTIONS),
cluster_size=cluster_size, num_coordinators=cluster_size,
expected_num_executors=cluster_size)
super(CustomClusterTestSuite, self).setup_class()
def teardown_method(self, method):
super(CustomClusterTestSuite, self).teardown_class()
@classmethod
def _stop_impala_cluster(cls):
# TODO: Figure out a better way to handle case where processes are just starting
# / cleaning up so that sleeps are not needed.
sleep(2)
check_call([os.path.join(IMPALA_HOME, 'bin/start-impala-cluster.py'), '--kill_only'])
sleep(2)
@classmethod
def _start_sentry_service(cls, sentry_service_config, sentry_log_dir=None):
sentry_env = dict(os.environ)
if sentry_log_dir is not None:
sentry_env['SENTRY_LOG_DIR'] = sentry_log_dir
sentry_env['SENTRY_SERVICE_CONFIG'] = sentry_service_config
call = subprocess.Popen(
['/bin/bash', '-c', os.path.join(IMPALA_HOME,
'testdata/bin/run-sentry-service.sh')],
env=sentry_env)
call.wait()
if call.returncode != 0:
raise RuntimeError("Unable to start Sentry")
@classmethod
def _stop_sentry_service(cls):
subprocess.check_call([os.path.join(os.environ["IMPALA_HOME"],
"testdata/bin/kill-sentry-service.sh")],
close_fds=True)
@classmethod
def _start_impala_cluster(cls, options, impala_log_dir=os.getenv('LOG_DIR', "/tmp/"),
cluster_size=DEFAULT_CLUSTER_SIZE, num_coordinators=NUM_COORDINATORS,
use_exclusive_coordinators=False, log_level=1,
expected_num_executors=DEFAULT_CLUSTER_SIZE, default_query_options=None):
cls.impala_log_dir = impala_log_dir
# We ignore TEST_START_CLUSTER_ARGS here. Custom cluster tests specifically test that
# certain custom startup arguments work and we want to keep them independent of dev
# environments.
cmd = [os.path.join(IMPALA_HOME, 'bin/start-impala-cluster.py'),
'--cluster_size=%d' % cluster_size,
'--num_coordinators=%d' % num_coordinators,
'--log_dir=%s' % impala_log_dir,
'--log_level=%s' % log_level]
if use_exclusive_coordinators:
cmd.append("--use_exclusive_coordinators")
if pytest.config.option.use_local_catalog:
cmd.append("--impalad_args=--use_local_catalog=1")
cmd.append("--catalogd_args=--catalog_topic_mode=minimal")
if pytest.config.option.pull_incremental_statistics:
cmd.append("--impalad_args=%s --catalogd_args=%s" %
("--pull_incremental_statistics", "--pull_incremental_statistics"))
default_query_option_kvs = []
# Put any defaults first, then any arguments after that so they can override defaults.
if os.environ.get("ERASURE_CODING") == "true":
default_query_option_kvs.append(("allow_erasure_coded_files", "true"))
if default_query_options is not None:
default_query_option_kvs.extend(default_query_options)
# Add the default query options after any arguments. This will override any default
# options set in --impalad_args by design to force tests to pass default_query_options
# into this function directly.
options.append("--impalad_args=--default_query_options={0}".format(
','.join(["{0}={1}".format(k, v) for k, v in default_query_option_kvs])))
logging.info("Starting cluster with command: %s" %
" ".join(pipes.quote(arg) for arg in cmd + options))
try:
check_call(cmd + options, close_fds=True)
finally:
# Failure tests expect cluster to be initialised even if start-impala-cluster fails.
cls.cluster = ImpalaCluster()
statestored = cls.cluster.statestored
if statestored is None:
raise Exception("statestored was not found")
# The number of statestore subscribers is
# cluster_size (# of impalad) + 1 (for catalogd).
expected_subscribers = cluster_size + 1
statestored.service.wait_for_live_subscribers(expected_subscribers, timeout=60)
for impalad in cls.cluster.impalads:
impalad.service.wait_for_num_known_live_backends(expected_num_executors, timeout=60)
| 43.526531 | 90 | 0.73434 |
1ed95c3456e8611191b93300cff78493cca0c089 | 4,257 | py | Python | models/pointnet_cls.py | aviros/pointnet_totations | 3b6deac9fb2445776ce91c6fcf37b8ec9d497a1b | [
"MIT"
] | null | null | null | models/pointnet_cls.py | aviros/pointnet_totations | 3b6deac9fb2445776ce91c6fcf37b8ec9d497a1b | [
"MIT"
] | null | null | null | models/pointnet_cls.py | aviros/pointnet_totations | 3b6deac9fb2445776ce91c6fcf37b8ec9d497a1b | [
"MIT"
] | null | null | null | import tensorflow as tf
import numpy as np
import math
import sys
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(BASE_DIR, '../utils'))
import tf_util
from transform_nets import input_transform_net, feature_transform_net
def placeholder_inputs(batch_size, num_point, rotation_number=1):
pointclouds_pl = tf.placeholder(tf.float32, shape=(batch_size*rotation_number, num_point, 3))
labels_pl = tf.placeholder(tf.int32, shape=(batch_size*rotation_number))
return pointclouds_pl, labels_pl
def get_model(point_cloud, is_training, rotation_number=1, bn_decay=None):
""" Classification PointNet, input is BxNx3, output Bx40 """
batch_size = point_cloud.get_shape()[0].value
num_point = point_cloud.get_shape()[1].value
end_points = {}
# with tf.variable_scope('transform_net1') as sc:
# transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
# point_cloud_transformed = tf.matmul(point_cloud, transform)
input_image = tf.expand_dims(point_cloud, -1)
net = tf_util.conv2d(input_image, 64, [1,3],
padding='VALID', stride=[1,1],
bn=True, is_training=is_training,
scope='conv1', bn_decay=bn_decay)
net = tf_util.conv2d(net, 64, [1,1],
padding='VALID', stride=[1,1],
bn=True, is_training=is_training,
scope='conv2', bn_decay=bn_decay)
with tf.variable_scope('transform_net2') as sc:
transform = feature_transform_net(net, is_training, bn_decay, K=64)
end_points['transform'] = transform
net_transformed = tf.matmul(tf.squeeze(net, axis=[2]), transform)
net_transformed = tf.expand_dims(net_transformed, [2])
net = tf_util.conv2d(net_transformed, 64, [1,1],
padding='VALID', stride=[1,1],
bn=True, is_training=is_training,
scope='conv3', bn_decay=bn_decay)
net = tf_util.conv2d(net, 128, [1,1],
padding='VALID', stride=[1,1],
bn=True, is_training=is_training,
scope='conv4', bn_decay=bn_decay)
net = tf_util.conv2d(net, 1024, [1,1],
padding='VALID', stride=[1,1],
bn=True, is_training=is_training,
scope='conv5', bn_decay=bn_decay)
# Symmetric function: max pooling
net = tf_util.max_pool2d(net, [num_point,1],
padding='VALID', scope='maxpool')
net = tf.reshape(net, [batch_size, -1])
net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
scope='fc1', bn_decay=bn_decay)
net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
scope='dp1')
net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
scope='fc2', bn_decay=bn_decay)
net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
scope='dp2')
net = tf_util.fully_connected(net, rotation_number, activation_fn=None, scope='fc3')
#TODO: change last output according to prediction class
return net, end_points
def get_loss(pred, label, end_points, reg_weight=0.001):
""" pred: B*NUM_CLASSES,
label: B, """
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=pred, labels=label)
classify_loss = tf.reduce_mean(loss)
tf.summary.scalar('classify loss', classify_loss)
# Enforce the transformation as orthogonal matrix
transform = end_points['transform'] # BxKxK
K = transform.get_shape()[1].value
mat_diff = tf.matmul(transform, tf.transpose(transform, perm=[0,2,1]))
mat_diff -= tf.constant(np.eye(K), dtype=tf.float32)
mat_diff_loss = tf.nn.l2_loss(mat_diff)
tf.summary.scalar('mat loss', mat_diff_loss)
return classify_loss + mat_diff_loss * reg_weight
if __name__=='__main__':
with tf.Graph().as_default():
inputs = tf.zeros((32,1024,3))
outputs = get_model(inputs, tf.constant(True))
print(outputs)
| 43 | 97 | 0.63284 |
72dc2ab7a61976b4903e7d4c332dd5b27cf19fc0 | 1,578 | py | Python | hail/python/hailtop/hail_logging.py | mitochon/hail | 25e5e5b8da1d978468d2cee393426ade46484a87 | [
"MIT"
] | null | null | null | hail/python/hailtop/hail_logging.py | mitochon/hail | 25e5e5b8da1d978468d2cee393426ade46484a87 | [
"MIT"
] | 3 | 2017-06-16T18:10:45.000Z | 2017-07-21T17:44:13.000Z | hail/python/hailtop/hail_logging.py | mitochon/hail | 25e5e5b8da1d978468d2cee393426ade46484a87 | [
"MIT"
] | 2 | 2018-01-30T00:50:52.000Z | 2018-03-22T20:04:01.000Z | import logging
from pythonjsonlogger import jsonlogger
from aiohttp.abc import AbstractAccessLogger
import datetime
import time
class CustomJsonFormatter(jsonlogger.JsonFormatter):
def add_fields(self, log_record, record, message_dict):
super().add_fields(log_record, record, message_dict)
log_record['funcNameAndLine'] = "{}:{}".format(record.funcName, record.lineno)
log_record['hail_log'] = 1
def configure_logging():
fmt = CustomJsonFormatter('(levelname) (asctime) (filename) (funcNameAndLine) (message)')
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(fmt)
logging.basicConfig(handlers=[stream_handler], level=logging.INFO)
class AccessLogger(AbstractAccessLogger):
    """aiohttp access logger that attaches request metadata as structured extras.

    Logs one INFO line per completed request, with remote address, start
    time, duration, status and the ``X-Real-IP`` header carried in ``extra``
    for the JSON formatter to pick up.
    """

    def log(self, request, response, duration):
        # Fixed-offset local timezone derived from time.timezone.
        # NOTE(review): time.timezone is the non-DST offset — this may be off
        # by an hour while DST is in effect; confirm whether that matters here.
        tz = datetime.timezone(datetime.timedelta(seconds=-time.timezone))
        # The request started `duration` seconds before "now".
        started_at = datetime.datetime.now(tz) - datetime.timedelta(seconds=duration)
        extra = {
            'remote_address': request.remote,
            'request_start_time': started_at.strftime('[%d/%b/%Y:%H:%M:%S %z]'),
            'request_duration': duration,
            'response_status': response.status,
            'x_real_ip': request.headers.get("X-Real-IP"),
        }
        self.logger.info(
            f'{request.scheme} {request.method} {request.path} '
            f'done in {duration}s: {response.status}',
            extra=extra)
| 41.526316 | 93 | 0.651458 |
8d23bd4e6ce9e9a3d65d593849058b5b9a11799f | 9,651 | py | Python | hackerrank/euler009/euler009_brute.py | jcpince/algorithms | c43dd8e98a0f0df691ead5f25c2c17a9241db908 | [
"MIT"
] | null | null | null | hackerrank/euler009/euler009_brute.py | jcpince/algorithms | c43dd8e98a0f0df691ead5f25c2c17a9241db908 | [
"MIT"
] | null | null | null | hackerrank/euler009/euler009_brute.py | jcpince/algorithms | c43dd8e98a0f0df691ead5f25c2c17a9241db908 | [
"MIT"
] | null | null | null | pythagoreans = {2560: 534773760, 1026: 38372400, 2052: 306979200, 2080: 304711680, 1032: 38163360, 2220: 379897500, 2728: 721337760, 12: 60, 2050: 260071200, 1040: 38088960, 2392: 308855040, 1944: 255091680, 1044: 39510180, 2070: 301507920, 2592: 604661760, 24: 480, 1540: 129692640, 1050: 41107500, 2076: 310663020, 30: 780, 1056: 41627520, 1200: 60000000, 36: 1620, 1064: 42325920, 2090: 231990000, 1068: 42298140, 48: 3840, 2568: 588020640, 2100: 328860000, 2910: 711884940, 1078: 41201160, 56: 4200, 2920: 793594680, 2106: 330936840, 60: 7500, 2832: 788655360, 1088: 16776960, 2400: 480000000, 1092: 45214260, 70: 12180, 72: 12960, 1100: 30911100, 2914: 851121120, 1102: 45721980, 1104: 46721280, 2744: 494125800, 2130: 279170580, 84: 20580, 2574: 604220760, 1110: 39509340, 2136: 338385120, 2916: 860934420, 90: 21060, 1116: 48261420, 1776: 194507520, 2142: 328090140, 96: 30720, 2064: 305306880, 1122: 47561580, 2148: 344120340, 2150: 257084100, 2356: 399766080, 1128: 49835040, 1640: 140598840, 108: 43740, 2288: 423405840, 1134: 48682620, 112: 33600, 2408: 473786040, 1140: 51442500, 120: 60000, 2580: 609522420, 2170: 362854380, 2172: 355784460, 1150: 25743900, 2176: 134215680, 1216: 50269440, 2178: 313610220, 132: 79860, 2184: 369609240, 876: 23341020, 2668: 615027360, 140: 97440, 1218: 63969360, 2190: 303433260, 144: 103680, 2072: 306479880, 1170: 56745000, 2196: 367709220, 150: 97500, 2494: 543118380, 2200: 339405000, 154: 120120, 156: 131820, 2208: 381840480, 1904: 206088960, 2210: 367965000, 1188: 58217940, 1190: 59840340, 168: 164640, 1564: 134191200, 1196: 38606880, 176: 192720, 1224: 65159640, 180: 202500, 1054: 21469980, 182: 92820, 2232: 386091360, 2420: 266173380, 1212: 61818060, 192: 245760, 1568: 92198400, 1856: 123052800, 2244: 392352180, 2688: 674365440, 198: 235620, 200: 255000, 2250: 329062500, 2388: 472835940, 204: 294780, 1230: 53758380, 1232: 66102960, 2424: 494544480, 210: 328860, 416: 2221440, 1236: 65563620, 2316: 431343420, 
216: 349920, 1242: 51443640, 2268: 405076140, 1248: 67491840, 208: 277680, 228: 411540, 1254: 50109840, 2280: 411540000, 1404: 98055360, 234: 453960, 1260: 71033760, 240: 480000, 40: 2040, 2292: 418072260, 2430: 414523980, 2294: 343067700, 2088: 316081440, 2296: 426677160, 1748: 158159040, 1274: 31837260, 2300: 205951200, 1280: 66846720, 1920: 245760000, 260: 595140, 2262: 404422980, 2310: 437712660, 2600: 595140000, 1288: 64219680, 1290: 62015460, 1292: 45426720, 270: 568620, 1296: 75582720, 1240: 63957960, 2322: 402541920, 2464: 528823680, 276: 730020, 1302: 77482020, 280: 779520, 1924: 253015620, 2900: 624590400, 1308: 77701740, 286: 643500, 288: 829440, 1584: 140492880, 2340: 453960000, 1320: 81016320, 220: 199980, 2346: 452895300, 300: 937500, 2350: 172005900, 2352: 451772160, 1416: 98581920, 1330: 83542620, 308: 960960, 1334: 76878420, 312: 1054560, 2364: 458722380, 1272: 71460960, 2366: 203925540, 1344: 84295680, 1760: 192720000, 2370: 384570420, 324: 1180980, 1350: 71077500, 2376: 465743520, 2444: 286644540, 330: 1265880, 2380: 478722720, 336: 1317120, 1080: 44127720, 2432: 402155520, 340: 1063860, 2958: 691639560, 1368: 88892640, 2788: 634953060, 2394: 458044020, 348: 1463340, 2618: 604208220, 350: 1522500, 352: 1541760, 2448: 521277120, 2944: 905103360, 2880: 848652480, 1380: 91252500, 360: 1620000, 2108: 171759840, 1386: 88884180, 364: 742560, 1426: 101474160, 1392: 93653760, 1768: 188398080, 2418: 456034800, 372: 1787460, 374: 1761540, 1400: 97440000, 378: 1803060, 380: 619020, 1406: 17335980, 384: 1966080, 2112: 333020160, 1410: 80981940, 2436: 511754880, 390: 1713660, 392: 1440600, 2796: 758960220, 2442: 313015560, 396: 2156220, 400: 2040000, 920: 24820680, 2450: 522217500, 1428: 101109540, 1430: 80437500, 408: 2413320, 2328: 438083040, 2460: 524936940, 1440: 103680000, 2912: 761953920, 418: 1855920, 420: 2630880, 2470: 531667500, 2128: 338607360, 1450: 78073800, 1452: 106293660, 1456: 95244240, 1608: 144366240, 2484: 532184580, 1276: 71411340, 440: 
2715240, 442: 2943720, 444: 3039180, 1470: 112798980, 2496: 539934720, 2976: 915179520, 450: 2632500, 1476: 111652020, 1788: 198476940, 2004: 279447780, 1480: 103332120, 1612: 118651260, 1482: 114840180, 1720: 162194280, 2508: 547759740, 462: 3243240, 2368: 335344320, 2808: 784442880, 2516: 272596020, 2520: 568270080, 476: 3220140, 480: 3840000, 80: 16320, 1508: 71275620, 1512: 120022560, 252: 555660, 490: 4177740, 492: 4135260, 494: 4253340, 1520: 111938880, 1960: 267375360, 2124: 332713980, 2548: 254698080, 2550: 583297200, 504: 4445280, 2132: 316953780, 1530: 103467780, 1408: 98672640, 510: 3832140, 2240: 399114240, 1536: 125829120, 1792: 137625600, 2472: 524508960, 516: 4770420, 2646: 618449580, 520: 4761120, 2988: 926294940, 1548: 128801340, 770: 16211580, 1550: 116798700, 528: 5203440, 1624: 102433800, 1554: 86651040, 532: 5290740, 1560: 131820000, 1284: 73502580, 540: 5467500, 1566: 57393900, 544: 2097120, 432: 2799360, 546: 5088720, 1572: 134885460, 552: 5840160, 1628: 121196460, 2604: 619856160, 560: 6236160, 264: 638880, 2610: 595539360, 564: 6229380, 1590: 116124060, 2616: 621613920, 1972: 204930240, 570: 6044280, 1596: 142849980, 2622: 533786760, 576: 6635520, 1120: 49889280, 2440: 463041240, 2628: 630207540, 2632: 515332440, 780: 16477500, 1610: 148194060, 588: 7058940, 224: 268800, 2304: 424673280, 2752: 727890240, 2640: 650430000, 1464: 108950880, 594: 6361740, 2236: 330134220, 1620: 147622500, 598: 4825860, 600: 7500000, 612: 7959060, 2652: 647631660, 2490: 445993860, 2816: 789381120, 608: 6283680, 2320: 441904320, 2748: 720539340, 2660: 668340960, 1638: 155708280, 616: 7687680, 2680: 613556520, 2666: 671672040, 1644: 154281180, 2670: 549875820, 624: 8436480, 2664: 656462880, 1650: 158235000, 2676: 665374020, 2544: 571687680, 630: 8879220, 1656: 157684320, 1300: 74392500, 636: 8932620, 640: 8355840, 448: 2150400, 644: 8027460, 646: 5678340, 648: 9447840, 2156: 329609280, 650: 2441400, 2700: 697920300, 2924: 813822300, 1680: 168376320, 2840: 
730138440, 2204: 365775840, 660: 10127040, 1160: 55238040, 2712: 692590560, 1600: 135878400, 704: 12334080, 2784: 749230080, 1694: 159879720, 672: 10536960, 2160: 353021760, 1700: 172828800, 1702: 155477700, 680: 10022520, 2844: 798723180, 2730: 722505420, 684: 11111580, 1710: 163195560, 2736: 711141120, 456: 3292320, 690: 11166960, 1716: 175452420, 696: 11706720, 1722: 28964040, 700: 12180000, 702: 12256920, 1728: 179159040, 1824: 210708480, 2754: 674922780, 708: 12322740, 1736: 125122200, 972: 31886460, 714: 11324040, 1740: 182917500, 720: 12960000, 1144: 41184000, 1880: 211798920, 2772: 739583460, 1750: 190312500, 728: 9227400, 2852: 811793280, 732: 13618860, 736: 14142240, 1488: 114397440, 1764: 190591380, 1830: 177045180, 2790: 768271140, 744: 14299680, 1770: 160195620, 748: 14092320, 750: 12187500, 2800: 779520000, 2480: 511663680, 756: 15002820, 126: 66780, 1782: 171766980, 760: 13992360, 468: 3631680, 2812: 138687840, 768: 15728640, 1152: 53084160, 1794: 157728480, 2992: 946833360, 2820: 778672500, 1798: 82042740, 1800: 206791200, 2860: 792131340, 1804: 86889660, 782: 16773900, 784: 11524800, 2576: 513757440, 1812: 206577060, 1326: 79480440, 2838: 776107860, 792: 17249760, 1668: 161137140, 2842: 634845960, 2720: 641441280, 1820: 214075680, 2010: 234595140, 798: 9903180, 572: 5148000, 800: 16984800, 1840: 198565440, 2850: 794277900, 804: 18045780, 2584: 363413760, 2856: 827768760, 1500: 117187500, 810: 15352740, 1836: 214894620, 306: 354960, 2862: 104548860, 816: 19306560, 1672: 118778880, 2868: 819115140, 2870: 839457780, 1848: 219135840, 1332: 82057860, 1664: 142172160, 2624: 600200640, 828: 19710540, 1674: 114735960, 832: 17771520, 2016: 284497920, 2960: 826656960, 836: 14847360, 2590: 616953540, 2886: 277373460, 840: 21047040, 1164: 54760380, 2892: 839851260, 1870: 220192500, 1872: 232427520, 2360: 418973160, 850: 21603600, 852: 21474660, 2904: 850349280, 2532: 563635860, 858: 17374500, 1884: 232193580, 1850: 221933400, 1886: 144788220, 2256: 398680320, 
864: 22394880, 2704: 610062960, 1890: 239738940, 1892: 36752100, 870: 19023420, 1896: 236658720, 874: 19769880, 1900: 235341600, 2706: 624003600, 2926: 823903080, 880: 24090000, 2024: 82457760, 882: 22905540, 884: 23549760, 888: 24313440, 1632: 154452480, 1914: 164029800, 2940: 902391840, 2538: 401156280, 896: 17203200, 320: 1044480, 900: 25848900, 1472: 113137920, 2928: 871607040, 2952: 893216160, 1932: 250396860, 910: 26759460, 912: 26338560, 1176: 56471520, 1938: 153315180, 2724: 701824980, 2964: 918721440, 1518: 124126860, 918: 24997140, 2968: 625283400, 1860: 223432500, 2970: 922826520, 2120: 303709080, 924: 27391980, 1178: 49970760, 1950: 246546300, 928: 15381600, 2542: 566383020, 930: 23236980, 1956: 259844820, 936: 29053440, 1692: 168193260, 1936: 256510320, 2990: 603232500, 1968: 264656640, 948: 29582340, 950: 29417700, 952: 25761120, 1524: 122902980, 1978: 110253720, 1980: 273430080, 2378: 477835320, 1752: 186728160, 2214: 372726900, 960: 30720000, 160: 130560, 2146: 326277840, 966: 16335060, 1992: 274457760, 1356: 86573820, 2760: 730020000, 1998: 281578140, 2000: 255000000, 2552: 571290720, 2002: 263903640, 1976: 272213760, 980: 33421920, 1984: 57258240, 984: 33082080, 1908: 241180740, 986: 25616280, 988: 34026720, 990: 34178760, 992: 7157280, 1360: 80180160, 996: 34307220, 2726: 628806420, 2412: 487236060, 1000: 31875000, 2556: 579815820, 2028: 289608540, 2898: 812512260, 2030: 297058020, 1496: 112738560, 1008: 35562240, 1704: 171797280, 3000: 937500000, 1012: 10307220, 2040: 301665000, 1364: 90167220, 1020: 36847500, 2046: 114514620}
# Read the number of test cases, then answer one query per line: for each n,
# print the precomputed maximum product a*b*c of a Pythagorean triple with
# a+b+c == n, or -1 when no such triple exists.
t = int(input().strip())
for _ in range(t):
    n = int(input().strip())
    # Use .get rather than .setdefault: setdefault mutated the lookup table,
    # permanently inserting a -1 entry for every missing key it was asked
    # about.  .get performs the same lookup without side effects.
    print(pythagoreans.get(n, -1))
| 1,378.714286 | 9,533 | 0.745622 |
88bad06a304e0b32601988c6c12d380586f6c7a6 | 78,564 | py | Python | pika/spec.py | Anthchirp/pika | 55557c9172edb04256a5e49b7faf13078d5a0f66 | [
"BSD-3-Clause"
] | 2,479 | 2015-01-01T20:06:23.000Z | 2022-03-31T13:29:19.000Z | pika/spec.py | Anthchirp/pika | 55557c9172edb04256a5e49b7faf13078d5a0f66 | [
"BSD-3-Clause"
] | 813 | 2015-01-07T07:13:49.000Z | 2022-03-28T05:05:06.000Z | pika/spec.py | Anthchirp/pika | 55557c9172edb04256a5e49b7faf13078d5a0f66 | [
"BSD-3-Clause"
] | 763 | 2015-01-10T04:38:33.000Z | 2022-03-31T07:24:57.000Z | """
AMQP Specification
==================
This module implements the constants and classes that comprise AMQP protocol
level constructs. It should rarely be directly referenced outside of Pika's
own internal use.
.. note:: Auto-generated code by codegen.py, do not edit directly. Pull
requests to this file without accompanying ``utils/codegen.py`` changes will be
rejected.
"""
import struct
from pika import amqp_object
from pika import data
from pika.compat import str_or_bytes, unicode_type
from pika.exchange_type import ExchangeType
from pika.delivery_mode import DeliveryMode
# Rebind the builtin `str` to `bytes` so that the generated `str(...)` casts
# throughout this module produce byte strings on Python 3 (the AMQP wire
# format is byte-oriented).
str = bytes

# AMQP 0-9-1 protocol constants: version, default broker port, reply/error
# codes (used in Connection.Close / Channel.Close frames) and frame-level
# type codes and size limits.
PROTOCOL_VERSION = (0, 9, 1)
PORT = 5672

ACCESS_REFUSED = 403
CHANNEL_ERROR = 504
COMMAND_INVALID = 503
CONNECTION_FORCED = 320
CONTENT_TOO_LARGE = 311
FRAME_BODY = 3
FRAME_END = 206
FRAME_END_SIZE = 1
FRAME_ERROR = 501
FRAME_HEADER = 2
FRAME_HEADER_SIZE = 7
FRAME_HEARTBEAT = 8
FRAME_MAX_SIZE = 131072
FRAME_METHOD = 1
FRAME_MIN_SIZE = 4096
INTERNAL_ERROR = 541
INVALID_PATH = 402
NOT_ALLOWED = 530
NOT_FOUND = 404
NOT_IMPLEMENTED = 540
NO_CONSUMERS = 313
NO_ROUTE = 312
PERSISTENT_DELIVERY_MODE = 2
PRECONDITION_FAILED = 406
REPLY_SUCCESS = 200
RESOURCE_ERROR = 506
RESOURCE_LOCKED = 405
SYNTAX_ERROR = 502
TRANSIENT_DELIVERY_MODE = 1
UNEXPECTED_FRAME = 505
class Connection(amqp_object.Class):
    """AMQP `connection` class (class id 10).

    Each nested class is one AMQP method frame with `decode`/`encode`
    implementing its exact wire representation (`decode` reads fields from
    a buffer starting at `offset` and returns `self`; `encode` returns a
    list of packed byte pieces).  NOTE(review): the `synchronous` property
    appears to mark methods that expect a paired reply frame — confirm
    against amqp_object.Method.
    """

    INDEX = 0x000A  # 10
    NAME = 'Connection'

    class Start(amqp_object.Method):
        """Connection.Start (10, 10): protocol version, server properties,
        SASL mechanisms and locales."""

        INDEX = 0x000A000A  # 10, 10; 655370
        NAME = 'Connection.Start'

        def __init__(self, version_major=0, version_minor=9, server_properties=None, mechanisms='PLAIN', locales='en_US'):
            self.version_major = version_major
            self.version_minor = version_minor
            self.server_properties = server_properties
            self.mechanisms = mechanisms
            self.locales = locales

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            # octet: version_major, octet: version_minor
            self.version_major = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.version_minor = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            # field table: server_properties
            (self.server_properties, offset) = data.decode_table(encoded, offset)
            # long string: mechanisms (4-byte length prefix)
            length = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.mechanisms = encoded[offset:offset + length]
            try:
                self.mechanisms = str(self.mechanisms)
            except UnicodeEncodeError:
                pass
            offset += length
            # long string: locales
            length = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.locales = encoded[offset:offset + length]
            try:
                self.locales = str(self.locales)
            except UnicodeEncodeError:
                pass
            offset += length
            return self

        def encode(self):
            pieces = list()
            pieces.append(struct.pack('B', self.version_major))
            pieces.append(struct.pack('B', self.version_minor))
            data.encode_table(pieces, self.server_properties)
            assert isinstance(self.mechanisms, str_or_bytes),\
                'A non-string value was supplied for self.mechanisms'
            value = self.mechanisms.encode('utf-8') if isinstance(self.mechanisms, unicode_type) else self.mechanisms
            pieces.append(struct.pack('>I', len(value)))
            pieces.append(value)
            assert isinstance(self.locales, str_or_bytes),\
                'A non-string value was supplied for self.locales'
            value = self.locales.encode('utf-8') if isinstance(self.locales, unicode_type) else self.locales
            pieces.append(struct.pack('>I', len(value)))
            pieces.append(value)
            return pieces

    class StartOk(amqp_object.Method):
        """Connection.StartOk (10, 11): client properties and the selected
        SASL mechanism, response and locale."""

        INDEX = 0x000A000B  # 10, 11; 655371
        NAME = 'Connection.StartOk'

        def __init__(self, client_properties=None, mechanism='PLAIN', response=None, locale='en_US'):
            self.client_properties = client_properties
            self.mechanism = mechanism
            self.response = response
            self.locale = locale

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            (self.client_properties, offset) = data.decode_table(encoded, offset)
            self.mechanism, offset = data.decode_short_string(encoded, offset)
            # long string: response (SASL payload)
            length = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.response = encoded[offset:offset + length]
            try:
                self.response = str(self.response)
            except UnicodeEncodeError:
                pass
            offset += length
            self.locale, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            pieces = list()
            data.encode_table(pieces, self.client_properties)
            assert isinstance(self.mechanism, str_or_bytes),\
                'A non-string value was supplied for self.mechanism'
            data.encode_short_string(pieces, self.mechanism)
            assert isinstance(self.response, str_or_bytes),\
                'A non-string value was supplied for self.response'
            value = self.response.encode('utf-8') if isinstance(self.response, unicode_type) else self.response
            pieces.append(struct.pack('>I', len(value)))
            pieces.append(value)
            assert isinstance(self.locale, str_or_bytes),\
                'A non-string value was supplied for self.locale'
            data.encode_short_string(pieces, self.locale)
            return pieces

    class Secure(amqp_object.Method):
        """Connection.Secure (10, 20): SASL challenge payload."""

        INDEX = 0x000A0014  # 10, 20; 655380
        NAME = 'Connection.Secure'

        def __init__(self, challenge=None):
            self.challenge = challenge

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            # long string: challenge
            length = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.challenge = encoded[offset:offset + length]
            try:
                self.challenge = str(self.challenge)
            except UnicodeEncodeError:
                pass
            offset += length
            return self

        def encode(self):
            pieces = list()
            assert isinstance(self.challenge, str_or_bytes),\
                'A non-string value was supplied for self.challenge'
            value = self.challenge.encode('utf-8') if isinstance(self.challenge, unicode_type) else self.challenge
            pieces.append(struct.pack('>I', len(value)))
            pieces.append(value)
            return pieces

    class SecureOk(amqp_object.Method):
        """Connection.SecureOk (10, 21): SASL challenge response payload."""

        INDEX = 0x000A0015  # 10, 21; 655381
        NAME = 'Connection.SecureOk'

        def __init__(self, response=None):
            self.response = response

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            # long string: response
            length = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.response = encoded[offset:offset + length]
            try:
                self.response = str(self.response)
            except UnicodeEncodeError:
                pass
            offset += length
            return self

        def encode(self):
            pieces = list()
            assert isinstance(self.response, str_or_bytes),\
                'A non-string value was supplied for self.response'
            value = self.response.encode('utf-8') if isinstance(self.response, unicode_type) else self.response
            pieces.append(struct.pack('>I', len(value)))
            pieces.append(value)
            return pieces

    class Tune(amqp_object.Method):
        """Connection.Tune (10, 30): proposed channel-max, frame-max and
        heartbeat values."""

        INDEX = 0x000A001E  # 10, 30; 655390
        NAME = 'Connection.Tune'

        def __init__(self, channel_max=0, frame_max=0, heartbeat=0):
            self.channel_max = channel_max
            self.frame_max = frame_max
            self.heartbeat = heartbeat

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            # short: channel_max, long: frame_max, short: heartbeat
            self.channel_max = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.frame_max = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.heartbeat = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            return self

        def encode(self):
            pieces = list()
            pieces.append(struct.pack('>H', self.channel_max))
            pieces.append(struct.pack('>I', self.frame_max))
            pieces.append(struct.pack('>H', self.heartbeat))
            return pieces

    class TuneOk(amqp_object.Method):
        """Connection.TuneOk (10, 31): negotiated channel-max, frame-max and
        heartbeat values (same layout as Tune)."""

        INDEX = 0x000A001F  # 10, 31; 655391
        NAME = 'Connection.TuneOk'

        def __init__(self, channel_max=0, frame_max=0, heartbeat=0):
            self.channel_max = channel_max
            self.frame_max = frame_max
            self.heartbeat = heartbeat

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            self.channel_max = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.frame_max = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.heartbeat = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            return self

        def encode(self):
            pieces = list()
            pieces.append(struct.pack('>H', self.channel_max))
            pieces.append(struct.pack('>I', self.frame_max))
            pieces.append(struct.pack('>H', self.heartbeat))
            return pieces

    class Open(amqp_object.Method):
        """Connection.Open (10, 40): virtual host selection plus deprecated
        capabilities/insist fields."""

        INDEX = 0x000A0028  # 10, 40; 655400
        NAME = 'Connection.Open'

        def __init__(self, virtual_host='/', capabilities='', insist=False):
            self.virtual_host = virtual_host
            self.capabilities = capabilities
            self.insist = insist

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            self.virtual_host, offset = data.decode_short_string(encoded, offset)
            self.capabilities, offset = data.decode_short_string(encoded, offset)
            # bit 0 of the packed flag octet
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.insist = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            pieces = list()
            assert isinstance(self.virtual_host, str_or_bytes),\
                'A non-string value was supplied for self.virtual_host'
            data.encode_short_string(pieces, self.virtual_host)
            assert isinstance(self.capabilities, str_or_bytes),\
                'A non-string value was supplied for self.capabilities'
            data.encode_short_string(pieces, self.capabilities)
            bit_buffer = 0
            if self.insist:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class OpenOk(amqp_object.Method):
        """Connection.OpenOk (10, 41): confirms the open; known_hosts is a
        deprecated field."""

        INDEX = 0x000A0029  # 10, 41; 655401
        NAME = 'Connection.OpenOk'

        def __init__(self, known_hosts=''):
            self.known_hosts = known_hosts

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            self.known_hosts, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            pieces = list()
            assert isinstance(self.known_hosts, str_or_bytes),\
                'A non-string value was supplied for self.known_hosts'
            data.encode_short_string(pieces, self.known_hosts)
            return pieces

    class Close(amqp_object.Method):
        """Connection.Close (10, 50): reply code/text plus the class and
        method ids that caused the close (0 when none)."""

        INDEX = 0x000A0032  # 10, 50; 655410
        NAME = 'Connection.Close'

        def __init__(self, reply_code=None, reply_text='', class_id=None, method_id=None):
            self.reply_code = reply_code
            self.reply_text = reply_text
            self.class_id = class_id
            self.method_id = method_id

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            self.reply_code = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.reply_text, offset = data.decode_short_string(encoded, offset)
            self.class_id = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.method_id = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            return self

        def encode(self):
            pieces = list()
            pieces.append(struct.pack('>H', self.reply_code))
            assert isinstance(self.reply_text, str_or_bytes),\
                'A non-string value was supplied for self.reply_text'
            data.encode_short_string(pieces, self.reply_text)
            pieces.append(struct.pack('>H', self.class_id))
            pieces.append(struct.pack('>H', self.method_id))
            return pieces

    class CloseOk(amqp_object.Method):
        """Connection.CloseOk (10, 51): empty confirmation frame."""

        INDEX = 0x000A0033  # 10, 51; 655411
        NAME = 'Connection.CloseOk'

        def __init__(self):
            pass

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            return self

        def encode(self):
            pieces = list()
            return pieces

    class Blocked(amqp_object.Method):
        """Connection.Blocked (10, 60): broker notification that publishing
        is blocked, with a textual reason."""

        INDEX = 0x000A003C  # 10, 60; 655420
        NAME = 'Connection.Blocked'

        def __init__(self, reason=''):
            self.reason = reason

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            self.reason, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            pieces = list()
            assert isinstance(self.reason, str_or_bytes),\
                'A non-string value was supplied for self.reason'
            data.encode_short_string(pieces, self.reason)
            return pieces

    class Unblocked(amqp_object.Method):
        """Connection.Unblocked (10, 61): empty frame lifting a prior
        Blocked notification."""

        INDEX = 0x000A003D  # 10, 61; 655421
        NAME = 'Connection.Unblocked'

        def __init__(self):
            pass

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            return self

        def encode(self):
            pieces = list()
            return pieces
class Channel(amqp_object.Class):
    """AMQP `channel` class (class id 20): open/close and flow control of
    channels multiplexed over one connection.

    Auto-generated wire-format code; each nested class is one AMQP method
    frame with `decode`/`encode` for its exact byte layout.
    """

    INDEX = 0x0014  # 20
    NAME = 'Channel'

    class Open(amqp_object.Method):
        """Channel.Open (20, 10): out_of_band is a deprecated field."""

        INDEX = 0x0014000A  # 20, 10; 1310730
        NAME = 'Channel.Open'

        def __init__(self, out_of_band=''):
            self.out_of_band = out_of_band

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            self.out_of_band, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            pieces = list()
            assert isinstance(self.out_of_band, str_or_bytes),\
                'A non-string value was supplied for self.out_of_band'
            data.encode_short_string(pieces, self.out_of_band)
            return pieces

    class OpenOk(amqp_object.Method):
        """Channel.OpenOk (20, 11): channel_id is a deprecated long-string
        field."""

        INDEX = 0x0014000B  # 20, 11; 1310731
        NAME = 'Channel.OpenOk'

        def __init__(self, channel_id=''):
            self.channel_id = channel_id

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            # long string: channel_id (4-byte length prefix)
            length = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.channel_id = encoded[offset:offset + length]
            try:
                self.channel_id = str(self.channel_id)
            except UnicodeEncodeError:
                pass
            offset += length
            return self

        def encode(self):
            pieces = list()
            assert isinstance(self.channel_id, str_or_bytes),\
                'A non-string value was supplied for self.channel_id'
            value = self.channel_id.encode('utf-8') if isinstance(self.channel_id, unicode_type) else self.channel_id
            pieces.append(struct.pack('>I', len(value)))
            pieces.append(value)
            return pieces

    class Flow(amqp_object.Method):
        """Channel.Flow (20, 20): single `active` bit enabling/disabling
        content delivery on the channel."""

        INDEX = 0x00140014  # 20, 20; 1310740
        NAME = 'Channel.Flow'

        def __init__(self, active=None):
            self.active = active

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.active = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            pieces = list()
            bit_buffer = 0
            if self.active:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class FlowOk(amqp_object.Method):
        """Channel.FlowOk (20, 21): echoes the `active` bit back."""

        INDEX = 0x00140015  # 20, 21; 1310741
        NAME = 'Channel.FlowOk'

        def __init__(self, active=None):
            self.active = active

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.active = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            pieces = list()
            bit_buffer = 0
            if self.active:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class Close(amqp_object.Method):
        """Channel.Close (20, 40): reply code/text plus the offending class
        and method ids (same layout as Connection.Close)."""

        INDEX = 0x00140028  # 20, 40; 1310760
        NAME = 'Channel.Close'

        def __init__(self, reply_code=None, reply_text='', class_id=None, method_id=None):
            self.reply_code = reply_code
            self.reply_text = reply_text
            self.class_id = class_id
            self.method_id = method_id

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            self.reply_code = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.reply_text, offset = data.decode_short_string(encoded, offset)
            self.class_id = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.method_id = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            return self

        def encode(self):
            pieces = list()
            pieces.append(struct.pack('>H', self.reply_code))
            assert isinstance(self.reply_text, str_or_bytes),\
                'A non-string value was supplied for self.reply_text'
            data.encode_short_string(pieces, self.reply_text)
            pieces.append(struct.pack('>H', self.class_id))
            pieces.append(struct.pack('>H', self.method_id))
            return pieces

    class CloseOk(amqp_object.Method):
        """Channel.CloseOk (20, 41): empty confirmation frame."""

        INDEX = 0x00140029  # 20, 41; 1310761
        NAME = 'Channel.CloseOk'

        def __init__(self):
            pass

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            return self

        def encode(self):
            pieces = list()
            return pieces
class Access(amqp_object.Class):
    """AMQP `access` class (class id 30) — deprecated realm/ticket methods
    retained for protocol compatibility.

    Auto-generated wire-format code; each nested class is one AMQP method
    frame with `decode`/`encode` for its exact byte layout.
    """

    INDEX = 0x001E  # 30
    NAME = 'Access'

    class Request(amqp_object.Method):
        """Access.Request (30, 10): realm name plus five permission bits
        packed into one flag octet."""

        INDEX = 0x001E000A  # 30, 10; 1966090
        NAME = 'Access.Request'

        def __init__(self, realm='/data', exclusive=False, passive=True, active=True, write=True, read=True):
            self.realm = realm
            self.exclusive = exclusive
            self.passive = passive
            self.active = active
            self.write = write
            self.read = read

        @property
        def synchronous(self):
            return True

        def decode(self, encoded, offset=0):
            self.realm, offset = data.decode_short_string(encoded, offset)
            # All five flags share a single packed octet (bits 0-4).
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.exclusive = (bit_buffer & (1 << 0)) != 0
            self.passive = (bit_buffer & (1 << 1)) != 0
            self.active = (bit_buffer & (1 << 2)) != 0
            self.write = (bit_buffer & (1 << 3)) != 0
            self.read = (bit_buffer & (1 << 4)) != 0
            return self

        def encode(self):
            pieces = list()
            assert isinstance(self.realm, str_or_bytes),\
                'A non-string value was supplied for self.realm'
            data.encode_short_string(pieces, self.realm)
            bit_buffer = 0
            if self.exclusive:
                bit_buffer |= 1 << 0
            if self.passive:
                bit_buffer |= 1 << 1
            if self.active:
                bit_buffer |= 1 << 2
            if self.write:
                bit_buffer |= 1 << 3
            if self.read:
                bit_buffer |= 1 << 4
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class RequestOk(amqp_object.Method):
        """Access.RequestOk (30, 11): returns a (deprecated) access
        ticket."""

        INDEX = 0x001E000B  # 30, 11; 1966091
        NAME = 'Access.RequestOk'

        def __init__(self, ticket=1):
            self.ticket = ticket

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            return self

        def encode(self):
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            return pieces
class Exchange(amqp_object.Class):
INDEX = 0x0028 # 40
NAME = 'Exchange'
class Declare(amqp_object.Method):
    """Exchange.Declare (40, 10): declare an exchange with its type and the
    passive/durable/auto_delete/internal/nowait flags packed into one
    octet, followed by an arguments field table.

    Nested inside the `Exchange` class (class id 40); auto-generated
    wire-format code.
    """

    INDEX = 0x0028000A  # 40, 10; 2621450
    NAME = 'Exchange.Declare'

    def __init__(self, ticket=0, exchange=None, type=ExchangeType.direct, passive=False, durable=False, auto_delete=False, internal=False, nowait=False, arguments=None):
        self.ticket = ticket
        self.exchange = exchange
        self.type = type
        self.passive = passive
        self.durable = durable
        self.auto_delete = auto_delete
        self.internal = internal
        self.nowait = nowait
        self.arguments = arguments

    @property
    def synchronous(self):
        return True

    def decode(self, encoded, offset=0):
        self.ticket = struct.unpack_from('>H', encoded, offset)[0]
        offset += 2
        self.exchange, offset = data.decode_short_string(encoded, offset)
        self.type, offset = data.decode_short_string(encoded, offset)
        # Five flags share one packed octet (bits 0-4).
        bit_buffer = struct.unpack_from('B', encoded, offset)[0]
        offset += 1
        self.passive = (bit_buffer & (1 << 0)) != 0
        self.durable = (bit_buffer & (1 << 1)) != 0
        self.auto_delete = (bit_buffer & (1 << 2)) != 0
        self.internal = (bit_buffer & (1 << 3)) != 0
        self.nowait = (bit_buffer & (1 << 4)) != 0
        (self.arguments, offset) = data.decode_table(encoded, offset)
        return self

    def encode(self):
        pieces = list()
        pieces.append(struct.pack('>H', self.ticket))
        assert isinstance(self.exchange, str_or_bytes),\
            'A non-string value was supplied for self.exchange'
        data.encode_short_string(pieces, self.exchange)
        assert isinstance(self.type, str_or_bytes),\
            'A non-string value was supplied for self.type'
        data.encode_short_string(pieces, self.type)
        bit_buffer = 0
        if self.passive:
            bit_buffer |= 1 << 0
        if self.durable:
            bit_buffer |= 1 << 1
        if self.auto_delete:
            bit_buffer |= 1 << 2
        if self.internal:
            bit_buffer |= 1 << 3
        if self.nowait:
            bit_buffer |= 1 << 4
        pieces.append(struct.pack('B', bit_buffer))
        data.encode_table(pieces, self.arguments)
        return pieces
class DeclareOk(amqp_object.Method):
    """Exchange.DeclareOk (40, 11): empty confirmation frame.

    Nested inside the `Exchange` class (class id 40).
    """

    INDEX = 0x0028000B  # 40, 11; 2621451
    NAME = 'Exchange.DeclareOk'

    def __init__(self):
        pass

    @property
    def synchronous(self):
        return False

    def decode(self, encoded, offset=0):
        return self

    def encode(self):
        pieces = list()
        return pieces
class Delete(amqp_object.Method):
    """Exchange.Delete (40, 20): delete an exchange; `if_unused` and
    `nowait` share one packed flag octet.

    Nested inside the `Exchange` class (class id 40).
    """

    INDEX = 0x00280014  # 40, 20; 2621460
    NAME = 'Exchange.Delete'

    def __init__(self, ticket=0, exchange=None, if_unused=False, nowait=False):
        self.ticket = ticket
        self.exchange = exchange
        self.if_unused = if_unused
        self.nowait = nowait

    @property
    def synchronous(self):
        return True

    def decode(self, encoded, offset=0):
        self.ticket = struct.unpack_from('>H', encoded, offset)[0]
        offset += 2
        self.exchange, offset = data.decode_short_string(encoded, offset)
        bit_buffer = struct.unpack_from('B', encoded, offset)[0]
        offset += 1
        self.if_unused = (bit_buffer & (1 << 0)) != 0
        self.nowait = (bit_buffer & (1 << 1)) != 0
        return self

    def encode(self):
        pieces = list()
        pieces.append(struct.pack('>H', self.ticket))
        assert isinstance(self.exchange, str_or_bytes),\
            'A non-string value was supplied for self.exchange'
        data.encode_short_string(pieces, self.exchange)
        bit_buffer = 0
        if self.if_unused:
            bit_buffer |= 1 << 0
        if self.nowait:
            bit_buffer |= 1 << 1
        pieces.append(struct.pack('B', bit_buffer))
        return pieces
class DeleteOk(amqp_object.Method):
    """Exchange.DeleteOk (40, 21): empty confirmation frame.

    Nested inside the `Exchange` class (class id 40).
    """

    INDEX = 0x00280015  # 40, 21; 2621461
    NAME = 'Exchange.DeleteOk'

    def __init__(self):
        pass

    @property
    def synchronous(self):
        return False

    def decode(self, encoded, offset=0):
        return self

    def encode(self):
        pieces = list()
        return pieces
class Bind(amqp_object.Method):
    """Exchange.Bind (40, 30): bind `destination` exchange to `source`
    exchange with a routing key, a `nowait` bit and an arguments table.

    Nested inside the `Exchange` class (class id 40).
    """

    INDEX = 0x0028001E  # 40, 30; 2621470
    NAME = 'Exchange.Bind'

    def __init__(self, ticket=0, destination=None, source=None, routing_key='', nowait=False, arguments=None):
        self.ticket = ticket
        self.destination = destination
        self.source = source
        self.routing_key = routing_key
        self.nowait = nowait
        self.arguments = arguments

    @property
    def synchronous(self):
        return True

    def decode(self, encoded, offset=0):
        self.ticket = struct.unpack_from('>H', encoded, offset)[0]
        offset += 2
        self.destination, offset = data.decode_short_string(encoded, offset)
        self.source, offset = data.decode_short_string(encoded, offset)
        self.routing_key, offset = data.decode_short_string(encoded, offset)
        bit_buffer = struct.unpack_from('B', encoded, offset)[0]
        offset += 1
        self.nowait = (bit_buffer & (1 << 0)) != 0
        (self.arguments, offset) = data.decode_table(encoded, offset)
        return self

    def encode(self):
        pieces = list()
        pieces.append(struct.pack('>H', self.ticket))
        assert isinstance(self.destination, str_or_bytes),\
            'A non-string value was supplied for self.destination'
        data.encode_short_string(pieces, self.destination)
        assert isinstance(self.source, str_or_bytes),\
            'A non-string value was supplied for self.source'
        data.encode_short_string(pieces, self.source)
        assert isinstance(self.routing_key, str_or_bytes),\
            'A non-string value was supplied for self.routing_key'
        data.encode_short_string(pieces, self.routing_key)
        bit_buffer = 0
        if self.nowait:
            bit_buffer |= 1 << 0
        pieces.append(struct.pack('B', bit_buffer))
        data.encode_table(pieces, self.arguments)
        return pieces
class BindOk(amqp_object.Method):
    """Exchange.BindOk (40, 31): broker confirmation of Exchange.Bind."""

    INDEX = 0x0028001F  # 40, 31; 2621471
    NAME = 'Exchange.BindOk'

    def __init__(self):
        """This method carries no arguments."""

    @property
    def synchronous(self):
        """A reply method never awaits a further reply."""
        return False

    def decode(self, encoded, offset=0):
        """Nothing on the wire to read; return self for chaining."""
        return self

    def encode(self):
        """No payload fields; the frame body is empty."""
        return []
class Unbind(amqp_object.Method):
    """Exchange.Unbind (40, 40): remove an exchange-to-exchange binding."""

    INDEX = 0x00280028  # 40, 40; 2621480
    NAME = 'Exchange.Unbind'

    def __init__(self, ticket=0, destination=None, source=None, routing_key='', nowait=False, arguments=None):
        self.ticket = ticket
        self.destination = destination
        self.source = source
        self.routing_key = routing_key
        self.nowait = nowait
        self.arguments = arguments

    @property
    def synchronous(self):
        """The sender should wait for Exchange.UnbindOk."""
        return True

    def decode(self, encoded, offset=0):
        """Populate fields from the wire payload starting at *offset*."""
        (self.ticket,) = struct.unpack_from('>H', encoded, offset)
        offset += 2
        self.destination, offset = data.decode_short_string(encoded, offset)
        self.source, offset = data.decode_short_string(encoded, offset)
        self.routing_key, offset = data.decode_short_string(encoded, offset)
        (flag_byte,) = struct.unpack_from('B', encoded, offset)
        offset += 1
        self.nowait = bool(flag_byte & 0x01)
        self.arguments, offset = data.decode_table(encoded, offset)
        return self

    def encode(self):
        """Serialize the fields into a list of wire-format byte pieces."""
        pieces = [struct.pack('>H', self.ticket)]
        # the three short-string fields share identical validation
        for label, value in (('destination', self.destination),
                             ('source', self.source),
                             ('routing_key', self.routing_key)):
            assert isinstance(value, str_or_bytes),\
                'A non-string value was supplied for self.' + label
            data.encode_short_string(pieces, value)
        pieces.append(struct.pack('B', 0x01 if self.nowait else 0))
        data.encode_table(pieces, self.arguments)
        return pieces
class UnbindOk(amqp_object.Method):
    """Exchange.UnbindOk (40, 51): broker confirmation of Exchange.Unbind."""

    INDEX = 0x00280033  # 40, 51; 2621491
    NAME = 'Exchange.UnbindOk'

    def __init__(self):
        """This method carries no arguments."""

    @property
    def synchronous(self):
        """A reply method never awaits a further reply."""
        return False

    def decode(self, encoded, offset=0):
        """Nothing on the wire to read; return self for chaining."""
        return self

    def encode(self):
        """No payload fields; the frame body is empty."""
        return []
class Queue(amqp_object.Class):
    """AMQP 'Queue' protocol class (class id 50).

    Generated container for the Queue.* method frames: declare, bind,
    purge, delete and unbind. Each nested Method follows the same
    contract: ``decode`` reads its fields from a byte buffer and returns
    ``self``; ``encode`` returns a list of wire-format byte pieces.
    Boolean fields are packed together into single flag octets.
    """

    INDEX = 0x0032  # 50
    NAME = 'Queue'

    class Declare(amqp_object.Method):
        """Queue.Declare (50, 10): create a queue or verify it exists."""

        INDEX = 0x0032000A  # 50, 10; 3276810
        NAME = 'Queue.Declare'

        def __init__(self, ticket=0, queue='', passive=False, durable=False, exclusive=False, auto_delete=False, nowait=False, arguments=None):
            self.ticket = ticket
            self.queue = queue
            self.passive = passive
            self.durable = durable
            self.exclusive = exclusive
            self.auto_delete = auto_delete
            self.nowait = nowait
            self.arguments = arguments

        @property
        def synchronous(self):
            # sender should wait for Queue.DeclareOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.queue, offset = data.decode_short_string(encoded, offset)
            # five booleans packed into one flag octet (bits 0-4)
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.passive = (bit_buffer & (1 << 0)) != 0
            self.durable = (bit_buffer & (1 << 1)) != 0
            self.exclusive = (bit_buffer & (1 << 2)) != 0
            self.auto_delete = (bit_buffer & (1 << 3)) != 0
            self.nowait = (bit_buffer & (1 << 4)) != 0
            (self.arguments, offset) = data.decode_table(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            assert isinstance(self.queue, str_or_bytes),\
                'A non-string value was supplied for self.queue'
            data.encode_short_string(pieces, self.queue)
            bit_buffer = 0
            if self.passive:
                bit_buffer |= 1 << 0
            if self.durable:
                bit_buffer |= 1 << 1
            if self.exclusive:
                bit_buffer |= 1 << 2
            if self.auto_delete:
                bit_buffer |= 1 << 3
            if self.nowait:
                bit_buffer |= 1 << 4
            pieces.append(struct.pack('B', bit_buffer))
            data.encode_table(pieces, self.arguments)
            return pieces

    class DeclareOk(amqp_object.Method):
        """Queue.DeclareOk (50, 11): reports queue name and counters."""

        INDEX = 0x0032000B  # 50, 11; 3276811
        NAME = 'Queue.DeclareOk'

        def __init__(self, queue=None, message_count=None, consumer_count=None):
            self.queue = queue
            self.message_count = message_count
            self.consumer_count = consumer_count

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.queue, offset = data.decode_short_string(encoded, offset)
            self.message_count = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.consumer_count = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            assert isinstance(self.queue, str_or_bytes),\
                'A non-string value was supplied for self.queue'
            data.encode_short_string(pieces, self.queue)
            pieces.append(struct.pack('>I', self.message_count))
            pieces.append(struct.pack('>I', self.consumer_count))
            return pieces

    class Bind(amqp_object.Method):
        """Queue.Bind (50, 20): bind a queue to an exchange."""

        INDEX = 0x00320014  # 50, 20; 3276820
        NAME = 'Queue.Bind'

        def __init__(self, ticket=0, queue='', exchange=None, routing_key='', nowait=False, arguments=None):
            self.ticket = ticket
            self.queue = queue
            self.exchange = exchange
            self.routing_key = routing_key
            self.nowait = nowait
            self.arguments = arguments

        @property
        def synchronous(self):
            # sender should wait for Queue.BindOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.queue, offset = data.decode_short_string(encoded, offset)
            self.exchange, offset = data.decode_short_string(encoded, offset)
            self.routing_key, offset = data.decode_short_string(encoded, offset)
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.nowait = (bit_buffer & (1 << 0)) != 0
            (self.arguments, offset) = data.decode_table(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            assert isinstance(self.queue, str_or_bytes),\
                'A non-string value was supplied for self.queue'
            data.encode_short_string(pieces, self.queue)
            assert isinstance(self.exchange, str_or_bytes),\
                'A non-string value was supplied for self.exchange'
            data.encode_short_string(pieces, self.exchange)
            assert isinstance(self.routing_key, str_or_bytes),\
                'A non-string value was supplied for self.routing_key'
            data.encode_short_string(pieces, self.routing_key)
            bit_buffer = 0
            if self.nowait:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            data.encode_table(pieces, self.arguments)
            return pieces

    class BindOk(amqp_object.Method):
        """Queue.BindOk (50, 21): broker confirmation of Queue.Bind."""

        INDEX = 0x00320015  # 50, 21; 3276821
        NAME = 'Queue.BindOk'

        def __init__(self):
            pass

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            # no fields to read
            return self

        def encode(self):
            # no fields to write
            pieces = list()
            return pieces

    class Purge(amqp_object.Method):
        """Queue.Purge (50, 30): drop all messages from a queue."""

        INDEX = 0x0032001E  # 50, 30; 3276830
        NAME = 'Queue.Purge'

        def __init__(self, ticket=0, queue='', nowait=False):
            self.ticket = ticket
            self.queue = queue
            self.nowait = nowait

        @property
        def synchronous(self):
            # sender should wait for Queue.PurgeOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.queue, offset = data.decode_short_string(encoded, offset)
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.nowait = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            assert isinstance(self.queue, str_or_bytes),\
                'A non-string value was supplied for self.queue'
            data.encode_short_string(pieces, self.queue)
            bit_buffer = 0
            if self.nowait:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class PurgeOk(amqp_object.Method):
        """Queue.PurgeOk (50, 31): reports how many messages were purged."""

        INDEX = 0x0032001F  # 50, 31; 3276831
        NAME = 'Queue.PurgeOk'

        def __init__(self, message_count=None):
            self.message_count = message_count

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.message_count = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>I', self.message_count))
            return pieces

    class Delete(amqp_object.Method):
        """Queue.Delete (50, 40): remove a queue from the broker."""

        INDEX = 0x00320028  # 50, 40; 3276840
        NAME = 'Queue.Delete'

        def __init__(self, ticket=0, queue='', if_unused=False, if_empty=False, nowait=False):
            self.ticket = ticket
            self.queue = queue
            self.if_unused = if_unused
            self.if_empty = if_empty
            self.nowait = nowait

        @property
        def synchronous(self):
            # sender should wait for Queue.DeleteOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.queue, offset = data.decode_short_string(encoded, offset)
            # three booleans packed into one flag octet (bits 0-2)
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.if_unused = (bit_buffer & (1 << 0)) != 0
            self.if_empty = (bit_buffer & (1 << 1)) != 0
            self.nowait = (bit_buffer & (1 << 2)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            assert isinstance(self.queue, str_or_bytes),\
                'A non-string value was supplied for self.queue'
            data.encode_short_string(pieces, self.queue)
            bit_buffer = 0
            if self.if_unused:
                bit_buffer |= 1 << 0
            if self.if_empty:
                bit_buffer |= 1 << 1
            if self.nowait:
                bit_buffer |= 1 << 2
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class DeleteOk(amqp_object.Method):
        """Queue.DeleteOk (50, 41): reports how many messages were deleted."""

        INDEX = 0x00320029  # 50, 41; 3276841
        NAME = 'Queue.DeleteOk'

        def __init__(self, message_count=None):
            self.message_count = message_count

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.message_count = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>I', self.message_count))
            return pieces

    class Unbind(amqp_object.Method):
        """Queue.Unbind (50, 50): remove a queue-to-exchange binding."""

        INDEX = 0x00320032  # 50, 50; 3276850
        NAME = 'Queue.Unbind'

        def __init__(self, ticket=0, queue='', exchange=None, routing_key='', arguments=None):
            self.ticket = ticket
            self.queue = queue
            self.exchange = exchange
            self.routing_key = routing_key
            self.arguments = arguments

        @property
        def synchronous(self):
            # sender should wait for Queue.UnbindOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.queue, offset = data.decode_short_string(encoded, offset)
            self.exchange, offset = data.decode_short_string(encoded, offset)
            self.routing_key, offset = data.decode_short_string(encoded, offset)
            (self.arguments, offset) = data.decode_table(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            assert isinstance(self.queue, str_or_bytes),\
                'A non-string value was supplied for self.queue'
            data.encode_short_string(pieces, self.queue)
            assert isinstance(self.exchange, str_or_bytes),\
                'A non-string value was supplied for self.exchange'
            data.encode_short_string(pieces, self.exchange)
            assert isinstance(self.routing_key, str_or_bytes),\
                'A non-string value was supplied for self.routing_key'
            data.encode_short_string(pieces, self.routing_key)
            data.encode_table(pieces, self.arguments)
            return pieces

    class UnbindOk(amqp_object.Method):
        """Queue.UnbindOk (50, 51): broker confirmation of Queue.Unbind."""

        INDEX = 0x00320033  # 50, 51; 3276851
        NAME = 'Queue.UnbindOk'

        def __init__(self):
            pass

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            # no fields to read
            return self

        def encode(self):
            # no fields to write
            pieces = list()
            return pieces
class Basic(amqp_object.Class):
    """AMQP 'Basic' protocol class (class id 60).

    Generated container for the Basic.* method frames: publishing,
    consuming, acknowledging and recovering messages. Each nested
    Method follows the same contract: ``decode`` reads its fields from
    a byte buffer and returns ``self``; ``encode`` returns a list of
    wire-format byte pieces. Boolean fields are packed together into
    single flag octets.
    """

    INDEX = 0x003C  # 60
    NAME = 'Basic'

    class Qos(amqp_object.Method):
        """Basic.Qos (60, 10): specify prefetch quality-of-service limits."""

        INDEX = 0x003C000A  # 60, 10; 3932170
        NAME = 'Basic.Qos'

        def __init__(self, prefetch_size=0, prefetch_count=0, global_qos=False):
            self.prefetch_size = prefetch_size
            self.prefetch_count = prefetch_count
            self.global_qos = global_qos

        @property
        def synchronous(self):
            # sender should wait for Basic.QosOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.prefetch_size = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            self.prefetch_count = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.global_qos = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>I', self.prefetch_size))
            pieces.append(struct.pack('>H', self.prefetch_count))
            bit_buffer = 0
            if self.global_qos:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class QosOk(amqp_object.Method):
        """Basic.QosOk (60, 11): broker confirmation of Basic.Qos."""

        INDEX = 0x003C000B  # 60, 11; 3932171
        NAME = 'Basic.QosOk'

        def __init__(self):
            pass

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            # no fields to read
            return self

        def encode(self):
            # no fields to write
            pieces = list()
            return pieces

    class Consume(amqp_object.Method):
        """Basic.Consume (60, 20): start a consumer on a queue."""

        INDEX = 0x003C0014  # 60, 20; 3932180
        NAME = 'Basic.Consume'

        def __init__(self, ticket=0, queue='', consumer_tag='', no_local=False, no_ack=False, exclusive=False, nowait=False, arguments=None):
            self.ticket = ticket
            self.queue = queue
            self.consumer_tag = consumer_tag
            self.no_local = no_local
            self.no_ack = no_ack
            self.exclusive = exclusive
            self.nowait = nowait
            self.arguments = arguments

        @property
        def synchronous(self):
            # sender should wait for Basic.ConsumeOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.queue, offset = data.decode_short_string(encoded, offset)
            self.consumer_tag, offset = data.decode_short_string(encoded, offset)
            # four booleans packed into one flag octet (bits 0-3)
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.no_local = (bit_buffer & (1 << 0)) != 0
            self.no_ack = (bit_buffer & (1 << 1)) != 0
            self.exclusive = (bit_buffer & (1 << 2)) != 0
            self.nowait = (bit_buffer & (1 << 3)) != 0
            (self.arguments, offset) = data.decode_table(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            assert isinstance(self.queue, str_or_bytes),\
                'A non-string value was supplied for self.queue'
            data.encode_short_string(pieces, self.queue)
            assert isinstance(self.consumer_tag, str_or_bytes),\
                'A non-string value was supplied for self.consumer_tag'
            data.encode_short_string(pieces, self.consumer_tag)
            bit_buffer = 0
            if self.no_local:
                bit_buffer |= 1 << 0
            if self.no_ack:
                bit_buffer |= 1 << 1
            if self.exclusive:
                bit_buffer |= 1 << 2
            if self.nowait:
                bit_buffer |= 1 << 3
            pieces.append(struct.pack('B', bit_buffer))
            data.encode_table(pieces, self.arguments)
            return pieces

    class ConsumeOk(amqp_object.Method):
        """Basic.ConsumeOk (60, 21): reports the assigned consumer tag."""

        INDEX = 0x003C0015  # 60, 21; 3932181
        NAME = 'Basic.ConsumeOk'

        def __init__(self, consumer_tag=None):
            self.consumer_tag = consumer_tag

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.consumer_tag, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            assert isinstance(self.consumer_tag, str_or_bytes),\
                'A non-string value was supplied for self.consumer_tag'
            data.encode_short_string(pieces, self.consumer_tag)
            return pieces

    class Cancel(amqp_object.Method):
        """Basic.Cancel (60, 30): stop a consumer."""

        INDEX = 0x003C001E  # 60, 30; 3932190
        NAME = 'Basic.Cancel'

        def __init__(self, consumer_tag=None, nowait=False):
            self.consumer_tag = consumer_tag
            self.nowait = nowait

        @property
        def synchronous(self):
            # sender should wait for Basic.CancelOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.consumer_tag, offset = data.decode_short_string(encoded, offset)
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.nowait = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            assert isinstance(self.consumer_tag, str_or_bytes),\
                'A non-string value was supplied for self.consumer_tag'
            data.encode_short_string(pieces, self.consumer_tag)
            bit_buffer = 0
            if self.nowait:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class CancelOk(amqp_object.Method):
        """Basic.CancelOk (60, 31): broker confirmation of Basic.Cancel."""

        INDEX = 0x003C001F  # 60, 31; 3932191
        NAME = 'Basic.CancelOk'

        def __init__(self, consumer_tag=None):
            self.consumer_tag = consumer_tag

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.consumer_tag, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            assert isinstance(self.consumer_tag, str_or_bytes),\
                'A non-string value was supplied for self.consumer_tag'
            data.encode_short_string(pieces, self.consumer_tag)
            return pieces

    class Publish(amqp_object.Method):
        """Basic.Publish (60, 40): publish a message to an exchange."""

        INDEX = 0x003C0028  # 60, 40; 3932200
        NAME = 'Basic.Publish'

        def __init__(self, ticket=0, exchange='', routing_key='', mandatory=False, immediate=False):
            self.ticket = ticket
            self.exchange = exchange
            self.routing_key = routing_key
            self.mandatory = mandatory
            self.immediate = immediate

        @property
        def synchronous(self):
            # fire-and-forget; no reply method
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.exchange, offset = data.decode_short_string(encoded, offset)
            self.routing_key, offset = data.decode_short_string(encoded, offset)
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.mandatory = (bit_buffer & (1 << 0)) != 0
            self.immediate = (bit_buffer & (1 << 1)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            assert isinstance(self.exchange, str_or_bytes),\
                'A non-string value was supplied for self.exchange'
            data.encode_short_string(pieces, self.exchange)
            assert isinstance(self.routing_key, str_or_bytes),\
                'A non-string value was supplied for self.routing_key'
            data.encode_short_string(pieces, self.routing_key)
            bit_buffer = 0
            if self.mandatory:
                bit_buffer |= 1 << 0
            if self.immediate:
                bit_buffer |= 1 << 1
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class Return(amqp_object.Method):
        """Basic.Return (60, 50): broker returns an undeliverable message."""

        INDEX = 0x003C0032  # 60, 50; 3932210
        NAME = 'Basic.Return'

        def __init__(self, reply_code=None, reply_text='', exchange=None, routing_key=None):
            self.reply_code = reply_code
            self.reply_text = reply_text
            self.exchange = exchange
            self.routing_key = routing_key

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.reply_code = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.reply_text, offset = data.decode_short_string(encoded, offset)
            self.exchange, offset = data.decode_short_string(encoded, offset)
            self.routing_key, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.reply_code))
            assert isinstance(self.reply_text, str_or_bytes),\
                'A non-string value was supplied for self.reply_text'
            data.encode_short_string(pieces, self.reply_text)
            assert isinstance(self.exchange, str_or_bytes),\
                'A non-string value was supplied for self.exchange'
            data.encode_short_string(pieces, self.exchange)
            assert isinstance(self.routing_key, str_or_bytes),\
                'A non-string value was supplied for self.routing_key'
            data.encode_short_string(pieces, self.routing_key)
            return pieces

    class Deliver(amqp_object.Method):
        """Basic.Deliver (60, 60): broker delivers a message to a consumer."""

        INDEX = 0x003C003C  # 60, 60; 3932220
        NAME = 'Basic.Deliver'

        def __init__(self, consumer_tag=None, delivery_tag=None, redelivered=False, exchange=None, routing_key=None):
            self.consumer_tag = consumer_tag
            self.delivery_tag = delivery_tag
            self.redelivered = redelivered
            self.exchange = exchange
            self.routing_key = routing_key

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.consumer_tag, offset = data.decode_short_string(encoded, offset)
            self.delivery_tag = struct.unpack_from('>Q', encoded, offset)[0]
            offset += 8
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.redelivered = (bit_buffer & (1 << 0)) != 0
            self.exchange, offset = data.decode_short_string(encoded, offset)
            self.routing_key, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            assert isinstance(self.consumer_tag, str_or_bytes),\
                'A non-string value was supplied for self.consumer_tag'
            data.encode_short_string(pieces, self.consumer_tag)
            pieces.append(struct.pack('>Q', self.delivery_tag))
            bit_buffer = 0
            if self.redelivered:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            assert isinstance(self.exchange, str_or_bytes),\
                'A non-string value was supplied for self.exchange'
            data.encode_short_string(pieces, self.exchange)
            assert isinstance(self.routing_key, str_or_bytes),\
                'A non-string value was supplied for self.routing_key'
            data.encode_short_string(pieces, self.routing_key)
            return pieces

    class Get(amqp_object.Method):
        """Basic.Get (60, 70): synchronously fetch one message from a queue."""

        INDEX = 0x003C0046  # 60, 70; 3932230
        NAME = 'Basic.Get'

        def __init__(self, ticket=0, queue='', no_ack=False):
            self.ticket = ticket
            self.queue = queue
            self.no_ack = no_ack

        @property
        def synchronous(self):
            # sender should wait for Basic.GetOk or Basic.GetEmpty
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.ticket = struct.unpack_from('>H', encoded, offset)[0]
            offset += 2
            self.queue, offset = data.decode_short_string(encoded, offset)
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.no_ack = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>H', self.ticket))
            assert isinstance(self.queue, str_or_bytes),\
                'A non-string value was supplied for self.queue'
            data.encode_short_string(pieces, self.queue)
            bit_buffer = 0
            if self.no_ack:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class GetOk(amqp_object.Method):
        """Basic.GetOk (60, 71): successful reply to Basic.Get."""

        INDEX = 0x003C0047  # 60, 71; 3932231
        NAME = 'Basic.GetOk'

        def __init__(self, delivery_tag=None, redelivered=False, exchange=None, routing_key=None, message_count=None):
            self.delivery_tag = delivery_tag
            self.redelivered = redelivered
            self.exchange = exchange
            self.routing_key = routing_key
            self.message_count = message_count

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.delivery_tag = struct.unpack_from('>Q', encoded, offset)[0]
            offset += 8
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.redelivered = (bit_buffer & (1 << 0)) != 0
            self.exchange, offset = data.decode_short_string(encoded, offset)
            self.routing_key, offset = data.decode_short_string(encoded, offset)
            self.message_count = struct.unpack_from('>I', encoded, offset)[0]
            offset += 4
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>Q', self.delivery_tag))
            bit_buffer = 0
            if self.redelivered:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            assert isinstance(self.exchange, str_or_bytes),\
                'A non-string value was supplied for self.exchange'
            data.encode_short_string(pieces, self.exchange)
            assert isinstance(self.routing_key, str_or_bytes),\
                'A non-string value was supplied for self.routing_key'
            data.encode_short_string(pieces, self.routing_key)
            pieces.append(struct.pack('>I', self.message_count))
            return pieces

    class GetEmpty(amqp_object.Method):
        """Basic.GetEmpty (60, 72): reply to Basic.Get when the queue is empty."""

        INDEX = 0x003C0048  # 60, 72; 3932232
        NAME = 'Basic.GetEmpty'

        def __init__(self, cluster_id=''):
            self.cluster_id = cluster_id

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.cluster_id, offset = data.decode_short_string(encoded, offset)
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            assert isinstance(self.cluster_id, str_or_bytes),\
                'A non-string value was supplied for self.cluster_id'
            data.encode_short_string(pieces, self.cluster_id)
            return pieces

    class Ack(amqp_object.Method):
        """Basic.Ack (60, 80): acknowledge one or more deliveries."""

        INDEX = 0x003C0050  # 60, 80; 3932240
        NAME = 'Basic.Ack'

        def __init__(self, delivery_tag=0, multiple=False):
            self.delivery_tag = delivery_tag
            self.multiple = multiple

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.delivery_tag = struct.unpack_from('>Q', encoded, offset)[0]
            offset += 8
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.multiple = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>Q', self.delivery_tag))
            bit_buffer = 0
            if self.multiple:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class Reject(amqp_object.Method):
        """Basic.Reject (60, 90): reject a single delivery."""

        INDEX = 0x003C005A  # 60, 90; 3932250
        NAME = 'Basic.Reject'

        def __init__(self, delivery_tag=None, requeue=True):
            self.delivery_tag = delivery_tag
            self.requeue = requeue

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.delivery_tag = struct.unpack_from('>Q', encoded, offset)[0]
            offset += 8
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.requeue = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>Q', self.delivery_tag))
            bit_buffer = 0
            if self.requeue:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class RecoverAsync(amqp_object.Method):
        """Basic.RecoverAsync (60, 100): deprecated async redelivery request."""

        INDEX = 0x003C0064  # 60, 100; 3932260
        NAME = 'Basic.RecoverAsync'

        def __init__(self, requeue=False):
            self.requeue = requeue

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.requeue = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            bit_buffer = 0
            if self.requeue:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class Recover(amqp_object.Method):
        """Basic.Recover (60, 110): ask the broker to redeliver unacked messages."""

        INDEX = 0x003C006E  # 60, 110; 3932270
        NAME = 'Basic.Recover'

        def __init__(self, requeue=False):
            self.requeue = requeue

        @property
        def synchronous(self):
            # sender should wait for Basic.RecoverOk
            return True

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.requeue = (bit_buffer & (1 << 0)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            bit_buffer = 0
            if self.requeue:
                bit_buffer |= 1 << 0
            pieces.append(struct.pack('B', bit_buffer))
            return pieces

    class RecoverOk(amqp_object.Method):
        """Basic.RecoverOk (60, 111): broker confirmation of Basic.Recover."""

        INDEX = 0x003C006F  # 60, 111; 3932271
        NAME = 'Basic.RecoverOk'

        def __init__(self):
            pass

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            # no fields to read
            return self

        def encode(self):
            # no fields to write
            pieces = list()
            return pieces

    class Nack(amqp_object.Method):
        """Basic.Nack (60, 120): negatively acknowledge one or more deliveries."""

        INDEX = 0x003C0078  # 60, 120; 3932280
        NAME = 'Basic.Nack'

        def __init__(self, delivery_tag=0, multiple=False, requeue=True):
            self.delivery_tag = delivery_tag
            self.multiple = multiple
            self.requeue = requeue

        @property
        def synchronous(self):
            return False

        def decode(self, encoded, offset=0):
            """Read the fields from *encoded* starting at *offset*."""
            self.delivery_tag = struct.unpack_from('>Q', encoded, offset)[0]
            offset += 8
            bit_buffer = struct.unpack_from('B', encoded, offset)[0]
            offset += 1
            self.multiple = (bit_buffer & (1 << 0)) != 0
            self.requeue = (bit_buffer & (1 << 1)) != 0
            return self

        def encode(self):
            """Serialize the fields into a list of byte pieces."""
            pieces = list()
            pieces.append(struct.pack('>Q', self.delivery_tag))
            bit_buffer = 0
            if self.multiple:
                bit_buffer |= 1 << 0
            if self.requeue:
                bit_buffer |= 1 << 1
            pieces.append(struct.pack('B', bit_buffer))
            return pieces
class Tx(amqp_object.Class):
    """AMQP 'Tx' protocol class (class id 90): local transactions.

    Every nested method carries no payload fields, so ``decode`` simply
    returns self and ``encode`` yields an empty list of pieces.
    """

    INDEX = 0x005A  # 90
    NAME = 'Tx'

    class Select(amqp_object.Method):
        """Tx.Select (90, 10): put the channel into transaction mode."""

        INDEX = 0x005A000A  # 90, 10; 5898250
        NAME = 'Tx.Select'

        def __init__(self):
            """This method carries no arguments."""

        @property
        def synchronous(self):
            """The sender should wait for Tx.SelectOk."""
            return True

        def decode(self, encoded, offset=0):
            """Nothing on the wire to read; return self for chaining."""
            return self

        def encode(self):
            """No payload fields; the frame body is empty."""
            return []

    class SelectOk(amqp_object.Method):
        """Tx.SelectOk (90, 11): broker confirmation of Tx.Select."""

        INDEX = 0x005A000B  # 90, 11; 5898251
        NAME = 'Tx.SelectOk'

        def __init__(self):
            """This method carries no arguments."""

        @property
        def synchronous(self):
            """A reply method never awaits a further reply."""
            return False

        def decode(self, encoded, offset=0):
            """Nothing on the wire to read; return self for chaining."""
            return self

        def encode(self):
            """No payload fields; the frame body is empty."""
            return []

    class Commit(amqp_object.Method):
        """Tx.Commit (90, 20): commit the current transaction."""

        INDEX = 0x005A0014  # 90, 20; 5898260
        NAME = 'Tx.Commit'

        def __init__(self):
            """This method carries no arguments."""

        @property
        def synchronous(self):
            """The sender should wait for Tx.CommitOk."""
            return True

        def decode(self, encoded, offset=0):
            """Nothing on the wire to read; return self for chaining."""
            return self

        def encode(self):
            """No payload fields; the frame body is empty."""
            return []

    class CommitOk(amqp_object.Method):
        """Tx.CommitOk (90, 21): broker confirmation of Tx.Commit."""

        INDEX = 0x005A0015  # 90, 21; 5898261
        NAME = 'Tx.CommitOk'

        def __init__(self):
            """This method carries no arguments."""

        @property
        def synchronous(self):
            """A reply method never awaits a further reply."""
            return False

        def decode(self, encoded, offset=0):
            """Nothing on the wire to read; return self for chaining."""
            return self

        def encode(self):
            """No payload fields; the frame body is empty."""
            return []

    class Rollback(amqp_object.Method):
        """Tx.Rollback (90, 30): abandon the current transaction."""

        INDEX = 0x005A001E  # 90, 30; 5898270
        NAME = 'Tx.Rollback'

        def __init__(self):
            """This method carries no arguments."""

        @property
        def synchronous(self):
            """The sender should wait for Tx.RollbackOk."""
            return True

        def decode(self, encoded, offset=0):
            """Nothing on the wire to read; return self for chaining."""
            return self

        def encode(self):
            """No payload fields; the frame body is empty."""
            return []

    class RollbackOk(amqp_object.Method):
        """Tx.RollbackOk (90, 31): broker confirmation of Tx.Rollback."""

        INDEX = 0x005A001F  # 90, 31; 5898271
        NAME = 'Tx.RollbackOk'

        def __init__(self):
            """This method carries no arguments."""

        @property
        def synchronous(self):
            """A reply method never awaits a further reply."""
            return False

        def decode(self, encoded, offset=0):
            """Nothing on the wire to read; return self for chaining."""
            return self

        def encode(self):
            """No payload fields; the frame body is empty."""
            return []
class Confirm(amqp_object.Class):
    """AMQP 'Confirm' protocol class (class id 85): publisher confirms."""

    INDEX = 0x0055  # 85
    NAME = 'Confirm'

    class Select(amqp_object.Method):
        """Confirm.Select (85, 10): put the channel into confirm mode."""

        INDEX = 0x0055000A  # 85, 10; 5570570
        NAME = 'Confirm.Select'

        def __init__(self, nowait=False):
            self.nowait = nowait

        @property
        def synchronous(self):
            """The sender should wait for Confirm.SelectOk."""
            return True

        def decode(self, encoded, offset=0):
            """Read the no-wait flag octet starting at *offset*."""
            (flag_byte,) = struct.unpack_from('B', encoded, offset)
            offset += 1
            self.nowait = bool(flag_byte & 0x01)
            return self

        def encode(self):
            """Serialize the no-wait flag as a single octet piece."""
            return [struct.pack('B', 0x01 if self.nowait else 0)]

    class SelectOk(amqp_object.Method):
        """Confirm.SelectOk (85, 11): broker confirmation of Confirm.Select."""

        INDEX = 0x0055000B  # 85, 11; 5570571
        NAME = 'Confirm.SelectOk'

        def __init__(self):
            """This method carries no arguments."""

        @property
        def synchronous(self):
            """A reply method never awaits a further reply."""
            return False

        def decode(self, encoded, offset=0):
            """Nothing on the wire to read; return self for chaining."""
            return self

        def encode(self):
            """No payload fields; the frame body is empty."""
            return []
class BasicProperties(amqp_object.Properties):
CLASS = Basic
INDEX = 0x003C # 60
NAME = 'BasicProperties'
FLAG_CONTENT_TYPE = (1 << 15)
FLAG_CONTENT_ENCODING = (1 << 14)
FLAG_HEADERS = (1 << 13)
FLAG_DELIVERY_MODE = (1 << 12)
FLAG_PRIORITY = (1 << 11)
FLAG_CORRELATION_ID = (1 << 10)
FLAG_REPLY_TO = (1 << 9)
FLAG_EXPIRATION = (1 << 8)
FLAG_MESSAGE_ID = (1 << 7)
FLAG_TIMESTAMP = (1 << 6)
FLAG_TYPE = (1 << 5)
FLAG_USER_ID = (1 << 4)
FLAG_APP_ID = (1 << 3)
FLAG_CLUSTER_ID = (1 << 2)
def __init__(self, content_type=None, content_encoding=None, headers=None, delivery_mode=None, priority=None, correlation_id=None, reply_to=None, expiration=None, message_id=None, timestamp=None, type=None, user_id=None, app_id=None, cluster_id=None):
self.content_type = content_type
self.content_encoding = content_encoding
self.headers = headers
if isinstance(delivery_mode, DeliveryMode):
self.delivery_mode = delivery_mode.value
else:
self.delivery_mode = delivery_mode
self.priority = priority
self.correlation_id = correlation_id
self.reply_to = reply_to
self.expiration = expiration
self.message_id = message_id
self.timestamp = timestamp
self.type = type
self.user_id = user_id
self.app_id = app_id
self.cluster_id = cluster_id
def decode(self, encoded, offset=0):
flags = 0
flagword_index = 0
while True:
partial_flags = struct.unpack_from('>H', encoded, offset)[0]
offset += 2
flags = flags | (partial_flags << (flagword_index * 16))
if not (partial_flags & 1):
break
flagword_index += 1
if flags & BasicProperties.FLAG_CONTENT_TYPE:
self.content_type, offset = data.decode_short_string(encoded, offset)
else:
self.content_type = None
if flags & BasicProperties.FLAG_CONTENT_ENCODING:
self.content_encoding, offset = data.decode_short_string(encoded, offset)
else:
self.content_encoding = None
if flags & BasicProperties.FLAG_HEADERS:
(self.headers, offset) = data.decode_table(encoded, offset)
else:
self.headers = None
if flags & BasicProperties.FLAG_DELIVERY_MODE:
self.delivery_mode = struct.unpack_from('B', encoded, offset)[0]
offset += 1
else:
self.delivery_mode = None
if flags & BasicProperties.FLAG_PRIORITY:
self.priority = struct.unpack_from('B', encoded, offset)[0]
offset += 1
else:
self.priority = None
if flags & BasicProperties.FLAG_CORRELATION_ID:
self.correlation_id, offset = data.decode_short_string(encoded, offset)
else:
self.correlation_id = None
if flags & BasicProperties.FLAG_REPLY_TO:
self.reply_to, offset = data.decode_short_string(encoded, offset)
else:
self.reply_to = None
if flags & BasicProperties.FLAG_EXPIRATION:
self.expiration, offset = data.decode_short_string(encoded, offset)
else:
self.expiration = None
if flags & BasicProperties.FLAG_MESSAGE_ID:
self.message_id, offset = data.decode_short_string(encoded, offset)
else:
self.message_id = None
if flags & BasicProperties.FLAG_TIMESTAMP:
self.timestamp = struct.unpack_from('>Q', encoded, offset)[0]
offset += 8
else:
self.timestamp = None
if flags & BasicProperties.FLAG_TYPE:
self.type, offset = data.decode_short_string(encoded, offset)
else:
self.type = None
if flags & BasicProperties.FLAG_USER_ID:
self.user_id, offset = data.decode_short_string(encoded, offset)
else:
self.user_id = None
if flags & BasicProperties.FLAG_APP_ID:
self.app_id, offset = data.decode_short_string(encoded, offset)
else:
self.app_id = None
if flags & BasicProperties.FLAG_CLUSTER_ID:
self.cluster_id, offset = data.decode_short_string(encoded, offset)
else:
self.cluster_id = None
return self
def encode(self):
pieces = list()
flags = 0
if self.content_type is not None:
flags = flags | BasicProperties.FLAG_CONTENT_TYPE
assert isinstance(self.content_type, str_or_bytes),\
'A non-string value was supplied for self.content_type'
data.encode_short_string(pieces, self.content_type)
if self.content_encoding is not None:
flags = flags | BasicProperties.FLAG_CONTENT_ENCODING
assert isinstance(self.content_encoding, str_or_bytes),\
'A non-string value was supplied for self.content_encoding'
data.encode_short_string(pieces, self.content_encoding)
if self.headers is not None:
flags = flags | BasicProperties.FLAG_HEADERS
data.encode_table(pieces, self.headers)
if self.delivery_mode is not None:
flags = flags | BasicProperties.FLAG_DELIVERY_MODE
pieces.append(struct.pack('B', self.delivery_mode))
if self.priority is not None:
flags = flags | BasicProperties.FLAG_PRIORITY
pieces.append(struct.pack('B', self.priority))
if self.correlation_id is not None:
flags = flags | BasicProperties.FLAG_CORRELATION_ID
assert isinstance(self.correlation_id, str_or_bytes),\
'A non-string value was supplied for self.correlation_id'
data.encode_short_string(pieces, self.correlation_id)
if self.reply_to is not None:
flags = flags | BasicProperties.FLAG_REPLY_TO
assert isinstance(self.reply_to, str_or_bytes),\
'A non-string value was supplied for self.reply_to'
data.encode_short_string(pieces, self.reply_to)
if self.expiration is not None:
flags = flags | BasicProperties.FLAG_EXPIRATION
assert isinstance(self.expiration, str_or_bytes),\
'A non-string value was supplied for self.expiration'
data.encode_short_string(pieces, self.expiration)
if self.message_id is not None:
flags = flags | BasicProperties.FLAG_MESSAGE_ID
assert isinstance(self.message_id, str_or_bytes),\
'A non-string value was supplied for self.message_id'
data.encode_short_string(pieces, self.message_id)
if self.timestamp is not None:
flags = flags | BasicProperties.FLAG_TIMESTAMP
pieces.append(struct.pack('>Q', self.timestamp))
if self.type is not None:
flags = flags | BasicProperties.FLAG_TYPE
assert isinstance(self.type, str_or_bytes),\
'A non-string value was supplied for self.type'
data.encode_short_string(pieces, self.type)
if self.user_id is not None:
flags = flags | BasicProperties.FLAG_USER_ID
assert isinstance(self.user_id, str_or_bytes),\
'A non-string value was supplied for self.user_id'
data.encode_short_string(pieces, self.user_id)
if self.app_id is not None:
flags = flags | BasicProperties.FLAG_APP_ID
assert isinstance(self.app_id, str_or_bytes),\
'A non-string value was supplied for self.app_id'
data.encode_short_string(pieces, self.app_id)
if self.cluster_id is not None:
flags = flags | BasicProperties.FLAG_CLUSTER_ID
assert isinstance(self.cluster_id, str_or_bytes),\
'A non-string value was supplied for self.cluster_id'
data.encode_short_string(pieces, self.cluster_id)
flag_pieces = list()
while True:
remainder = flags >> 16
partial_flags = flags & 0xFFFE
if remainder != 0:
partial_flags |= 1
flag_pieces.append(struct.pack('>H', partial_flags))
flags = remainder
if not flags:
break
return flag_pieces + pieces
methods = {
0x000A000A: Connection.Start,
0x000A000B: Connection.StartOk,
0x000A0014: Connection.Secure,
0x000A0015: Connection.SecureOk,
0x000A001E: Connection.Tune,
0x000A001F: Connection.TuneOk,
0x000A0028: Connection.Open,
0x000A0029: Connection.OpenOk,
0x000A0032: Connection.Close,
0x000A0033: Connection.CloseOk,
0x000A003C: Connection.Blocked,
0x000A003D: Connection.Unblocked,
0x0014000A: Channel.Open,
0x0014000B: Channel.OpenOk,
0x00140014: Channel.Flow,
0x00140015: Channel.FlowOk,
0x00140028: Channel.Close,
0x00140029: Channel.CloseOk,
0x001E000A: Access.Request,
0x001E000B: Access.RequestOk,
0x0028000A: Exchange.Declare,
0x0028000B: Exchange.DeclareOk,
0x00280014: Exchange.Delete,
0x00280015: Exchange.DeleteOk,
0x0028001E: Exchange.Bind,
0x0028001F: Exchange.BindOk,
0x00280028: Exchange.Unbind,
0x00280033: Exchange.UnbindOk,
0x0032000A: Queue.Declare,
0x0032000B: Queue.DeclareOk,
0x00320014: Queue.Bind,
0x00320015: Queue.BindOk,
0x0032001E: Queue.Purge,
0x0032001F: Queue.PurgeOk,
0x00320028: Queue.Delete,
0x00320029: Queue.DeleteOk,
0x00320032: Queue.Unbind,
0x00320033: Queue.UnbindOk,
0x003C000A: Basic.Qos,
0x003C000B: Basic.QosOk,
0x003C0014: Basic.Consume,
0x003C0015: Basic.ConsumeOk,
0x003C001E: Basic.Cancel,
0x003C001F: Basic.CancelOk,
0x003C0028: Basic.Publish,
0x003C0032: Basic.Return,
0x003C003C: Basic.Deliver,
0x003C0046: Basic.Get,
0x003C0047: Basic.GetOk,
0x003C0048: Basic.GetEmpty,
0x003C0050: Basic.Ack,
0x003C005A: Basic.Reject,
0x003C0064: Basic.RecoverAsync,
0x003C006E: Basic.Recover,
0x003C006F: Basic.RecoverOk,
0x003C0078: Basic.Nack,
0x005A000A: Tx.Select,
0x005A000B: Tx.SelectOk,
0x005A0014: Tx.Commit,
0x005A0015: Tx.CommitOk,
0x005A001E: Tx.Rollback,
0x005A001F: Tx.RollbackOk,
0x0055000A: Confirm.Select,
0x0055000B: Confirm.SelectOk
}
props = {
0x003C: BasicProperties
}
def has_content(methodNumber):
return methodNumber in (
Basic.Publish.INDEX,
Basic.Return.INDEX,
Basic.Deliver.INDEX,
Basic.GetOk.INDEX,
)
| 33.790968 | 255 | 0.57217 |
fd941f9d46aa8a6a9d851c3edb83c0145cb00a50 | 1,589 | py | Python | more/jinja2/tests/test_jinja2.py | sgaist/more.jinja2 | 8f6fb3034b7f15a4d8de9f0891b06bff6859f9df | [
"BSD-3-Clause"
] | 4 | 2015-05-16T19:48:34.000Z | 2019-06-23T20:19:31.000Z | more/jinja2/tests/test_jinja2.py | sgaist/more.jinja2 | 8f6fb3034b7f15a4d8de9f0891b06bff6859f9df | [
"BSD-3-Clause"
] | 10 | 2015-01-07T14:12:58.000Z | 2020-10-02T14:54:44.000Z | more/jinja2/tests/test_jinja2.py | sgaist/more.jinja2 | 8f6fb3034b7f15a4d8de9f0891b06bff6859f9df | [
"BSD-3-Clause"
] | 5 | 2015-05-15T22:15:39.000Z | 2020-10-02T13:51:20.000Z | from webtest import TestApp as Client
from .fixtures import (
template,
template_inheritance,
override_template,
override_template_inheritance,
)
def test_template():
c = Client(template.App())
response = c.get("/persons/world")
assert (
response.body
== b"""\
<html>
<body>
<p>Hello world!</p>
</body>
</html>"""
)
def test_override_template():
c = Client(override_template.App())
response = c.get("/persons/world")
assert (
response.body
== b"""\
<html>
<body>
<p>Hello world!</p>
</body>
</html>"""
)
c = Client(override_template.SubApp())
response = c.get("/persons/world")
assert (
response.body
== b"""\
<html>
<body>
<p>Hi world!</p>
</body>
</html>"""
)
def test_template_inheritance():
c = Client(template_inheritance.App())
response = c.get("/persons/world")
assert (
response.body
== b"""\
<html>
<head>
</head>
<body>
<div id="content">
<p>Hello world!</p>
</div>
</body>
</html>"""
)
def test_override_template_inheritance():
c = Client(override_template_inheritance.App())
response = c.get("/persons/world")
assert (
response.body
== b"""\
<html>
<head>
</head>
<body>
<div id="content">
<p>Hello world!</p>
</div>
</body>
</html>"""
)
c = Client(override_template_inheritance.SubApp())
response = c.get("/persons/world")
assert (
response.body
== b"""\
<html>
<head>
</head>
<body>
<div id="content2">
<p>Hello world!</p>
</div>
</body>
</html>"""
)
| 14.990566 | 54 | 0.572058 |
f1db51c82ce93f4168729180ad3c48604c36fff2 | 820 | py | Python | login/forms.py | hpathipati/Quick-Tutor | 17476d79b87f51b12a6c8fc435d1a6506bff1e04 | [
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null | login/forms.py | hpathipati/Quick-Tutor | 17476d79b87f51b12a6c8fc435d1a6506bff1e04 | [
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null | login/forms.py | hpathipati/Quick-Tutor | 17476d79b87f51b12a6c8fc435d1a6506bff1e04 | [
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class CustomUserCreationForm(UserCreationForm):
first_name = forms.CharField(max_length=20, required=True)
last_name = forms.CharField(max_length=20, required=True)
email = forms.EmailField(required=True)
class Meta:
model = User
fields = ("username", "first_name",
"last_name", "email", "password1", "password2")
def save(self, commit=True):
user = super(CustomUserCreationForm, self).save(commit=False)
user.first_name = self.cleaned_data["first_name"]
user.last_name = self.cleaned_data["last_name"]
user.email = self.cleaned_data["email"]
if commit:
user.save()
return user
| 34.166667 | 69 | 0.676829 |
2bd7ec2799c77b8cfc2c39ff7d65a35aa592cfd2 | 2,229 | py | Python | lib/surface/compute/packet_mirrorings/delete.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 2 | 2019-11-10T09:17:07.000Z | 2019-12-18T13:44:08.000Z | lib/surface/compute/packet_mirrorings/delete.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | null | null | null | lib/surface/compute/packet_mirrorings/delete.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 1 | 2020-07-25T01:40:19.000Z | 2020-07-25T01:40:19.000Z | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for deleting packet mirroring resources."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute.packet_mirrorings import client
from googlecloudsdk.command_lib.compute.packet_mirrorings import flags
class Delete(base.DeleteCommand):
"""Delete a Compute Engine packet mirroring policy."""
PACKET_MIRRORING_ARG = None
@classmethod
def Args(cls, parser):
cls.PACKET_MIRRORING_ARG = flags.PacketMirroringArgument(plural=True)
cls.PACKET_MIRRORING_ARG.AddArgument(parser, operation_type='delete')
parser.display_info.AddCacheUpdater(flags.PacketMirroringCompleter)
def Collection(self):
return 'compute.packetMirrorings'
def Run(self, args):
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
refs = self.PACKET_MIRRORING_ARG.ResolveAsResource(args, holder.resources)
utils.PromptForDeletion(refs)
requests = []
for ref in refs:
packet_mirroring = client.PacketMirroring(
ref, compute_client=holder.client)
requests.append(packet_mirroring.MakeDeleteRequestTuple())
return holder.client.MakeRequests(requests)
Delete.detailed_help = {
'DESCRIPTION': 'Delete a Compute Engine Packet Mirroring policy.',
'EXAMPLES':
"""\
Delete the Packet Mirroring policy pm-1 in region us-central1.
$ {command} pm-1
--region us-central1
"""
}
| 33.268657 | 78 | 0.762225 |
a82dbdde3f3223ce8093dd0dbe850283d773946d | 1,838 | py | Python | core-python-basic-knowledge/built-in-collections/dictionaries.py | hassonor/core-python | 92672aa72c1474061df5247a2dd4dfd9fab1642a | [
"MIT"
] | 1 | 2022-03-09T20:58:33.000Z | 2022-03-09T20:58:33.000Z | core-python-basic-knowledge/built-in-collections/dictionaries.py | hassonor/core-python | 92672aa72c1474061df5247a2dd4dfd9fab1642a | [
"MIT"
] | null | null | null | core-python-basic-knowledge/built-in-collections/dictionaries.py | hassonor/core-python | 92672aa72c1474061df5247a2dd4dfd9fab1642a | [
"MIT"
] | null | null | null | # Dictionaries
"""
Dictionary internals
Keys -> Must be immutable
Values -> May be mutable
As with lists, dictionary copying is shallow (by default)
"""
names_and_ages = [('Alice', 32), ('Bob', 48), ('Charlie', 50)]
d = dict(names_and_ages)
print(d)
phonetic = dict(a='alfa', b='bravo', c='charlie', d='delta', e='echo', f='foxtrot')
print(phonetic)
d = dict(goldenrod=0xDaa520, indigo=0x4B0082, seashell=0xFFF5EE)
e = d.copy()
print(e)
f = dict(e)
print(f)
"""
dict.update()
Adds entries from one dictionary into another.
Call this on the dictionary that is to be updated.
"""
g = dict(whear=0xF5DEB3, khaki=0xF0E68C, crimson=0xDC143C)
f.update(g)
print(f)
stocks = {'GOOG': 900, 'AAPL': 420, 'IBM': 204}
stocks.update({'GOOG': 920, 'YHOO': 34})
print(stocks)
"""
Dictionary iteration
--------------------
Dictionaries yield the next key on each iteration.
Values can be retrieved using the square-bracket operator.
"""
for key in stocks:
print(f"{key} => {stocks[key]}")
for value in stocks.values():
print(value)
for key in stocks.keys():
print(key)
"""
dict.items()
Iterates over keys and values in tandem.
Yields a (key, value) tuple on each iteration.
"""
for key, value in stocks.items():
print(f"{key} => {value}")
z = {'H': 1, 'Tc': 43, 'Xe': 44}
print(z)
del z['H']
print(z)
m = {'H': [1, 2, 3], 'Tc': [43, 33, 23], 'Xe': [44]}
print(m)
m['H'] += [4, 5, 6, 7]
print(m)
from pprint import pprint as pp
pp(m)
"""
Summary:
-> Dictionaries map from keys to values.
-> Iteration and membership in dictionaries are over keys.
-> Do not assume any order when iterating dictionary keys.
-> dict.keys(), dict.values(), and dict.items() are iterable views into dictionaries.
-> Copy dictionaries with dict.copy() or the dict constructor.
-> Use dict.update() extend one dictionary with another.
"""
| 20.651685 | 85 | 0.661045 |
793977499297aed0f218ba21656fe03f7f6eb554 | 4,114 | py | Python | scripts/fractal-tree.py | dogerish/dogerbot | f148d832ae10677cd7e1b822f8a7dc5676c05903 | [
"MIT"
] | 1 | 2020-12-07T04:05:33.000Z | 2020-12-07T04:05:33.000Z | scripts/fractal-tree.py | dogerish/dogerbot | f148d832ae10677cd7e1b822f8a7dc5676c05903 | [
"MIT"
] | null | null | null | scripts/fractal-tree.py | dogerish/dogerbot | f148d832ae10677cd7e1b822f8a7dc5676c05903 | [
"MIT"
] | null | null | null | #! /usr/bin/python3
speedtest = False
from math import sin, cos, pi
from random import randrange
if speedtest: from time import time
import pygame, sys
if speedtest: start = time()
noneargs = (None, "", ' ', "None", "N/A", "n/a", '/', '-')
default = lambda arg, d: arg if arg not in noneargs else d
arglist = "out, color, bgcolor, angInc, lenInc, maxBranch, res, initlen"
if len(sys.argv) > 1 and sys.argv[1] == "help":
print(arglist)
quit()
def hexstr(string):
if type(string) == int: return string
if string.startswith('#'):
return eval(f'0x{string[1:]}')
elif string.startswith('0x'):
return eval(f'0x{string[2:]}')
else:
return eval(f'0x{string}')
def getit(var, d, func=lambda x: x):
var = default(var, d)
if var == d: return var
try:
return func(var)
except: return d
def get(out=None, color=None, bgcolor=None, angInc=None, lenInc=None, maxBranch=None, res=None, initlen=None, *catchall):
out = getit(out, 'saved_img.png')
color = getit(color, 0x8000FF, hexstr)
bgcolor = getit(bgcolor, 0, hexstr)
angInc = getit(angInc, pi/6, lambda x: float(x)*pi/180)
lenInc = getit(lenInc, 3/4, lambda x: abs(float(x)))
maxBranch = getit(maxBranch, 15, lambda x: min(abs(int(x)), 16))
res = getit(res, [1000], lambda x: [min(abs(int(default(r, 0))), 2500) for r in x.split('x')])
initlen = getit(initlen, None, float)
return eval('(' + arglist + ')')
def trim(x, y, w, h, super_w, super_h):
x, y = min(max(x, 0), super_w), min(max(y, 0), super_h)
return x, y, min(w, super_w - x), min(h, super_h - y)
exec(arglist + ' = get(*sys.argv[1:])')
if speedtest:
args_set = [time()]
args_set.append(args_set[0] - start)
ratio = 966/746
if res[0] == 0 and res[1] == 0:
res[0] = 1000
xv = round(res[0]/ratio)
if len(res) == 1:
res.append(xv)
elif res[1] == 0:
res[1] = xv
if len(res) >= 2 and res[0] == 0:
res[0] = round(res[1]*ratio)
elif res[0] != 0 and len(res) >= 2:
initlen = 200
if initlen == None: initlen = res[0]/4.8
# y-value of the ground
ground = res[1]
# initial branch size
# rotates the point by a radians
rotate = lambda pos, r, a: (cos(a)*r + pos[0], sin(a)*r + pos[1])
if speedtest:
res_set = [time()]
res_set.append(res_set[0] - args_set[0])
surface = pygame.Surface(res)
points_x = []
points_y = []
def add(point):
points_x.append(point[0])
points_y.append(point[1])
draw = lambda point1, point2: pygame.draw.line(surface, color, point1, point2)
if speedtest:
surf_set = [time()]
surf_set.append(surf_set[0] - res_set[0])
base = res[0]/2
base2 = res[0]
# draws a branch starting at (x, y), with the length l, at the angle a, with the color c
def branch(pos, l, a, branches):
if branches == maxBranch: return add(pos)
newPos = rotate(pos, l, a)
draw(pos, newPos)
draw((base2 - pos[0], pos[1]), (base2 - newPos[0], newPos[1]))
branch(newPos, l*lenInc, a + angInc, branches + 1)
if branches != 0: branch(newPos, l*lenInc, a - angInc, branches + 1)
if bgcolor != 0: surface.fill(bgcolor)
if speedtest:
ready_set = [time()]
ready_set.append(ready_set[0] - surf_set[0])
branch((base, ground), initlen, -pi/2, 0)
if speedtest:
drawn = [time()]
drawn.append(drawn[0] - ready_set[0])
maxi = max(points_x)
coords = [base2 - maxi, min(points_y)]
coords += [max(maxi - coords[0], 1), max(ground - coords[1], 1)]
if speedtest:
coords_set = [time()]
coords_set.append(coords_set[0] - drawn[0])
area = pygame.Rect(*trim(*coords, surface.get_width(), surface.get_height()))
if speedtest:
area_set = [time()]
area_set.append(area_set[0] - coords_set[0])
pygame.image.save(surface.subsurface(area), out)
if speedtest:
saved = [time()]
saved.append(saved[0] - area_set[0])
if speedtest:
print(f"""
ARGS: {args_set[1]*1000}ms
RES: {res_set[1]*1000}ms
SURF: {surf_set[1]*1000}ms
READY: {ready_set[1]*1000}ms
DRAWN: {drawn[1]*1000}ms
COORDS: {coords_set[1]*1000}ms
AREA: {area_set[1]*1000}ms
SAVED: {saved[1]*1000}ms
""")
print(out)
| 30.474074 | 121 | 0.623238 |
bee9f8c635784361f9852b7daae992dd6d892199 | 7,460 | py | Python | ros/src/tl_detector/tl_detector.py | yufeiliubrown/carnd_clothoid | 0e6c008ca7f4d91a845c35ca2d925bcb8e21ac94 | [
"MIT"
] | 1 | 2018-07-20T04:13:04.000Z | 2018-07-20T04:13:04.000Z | ros/src/tl_detector/tl_detector.py | yufeiliubrown/carnd_clothoid | 0e6c008ca7f4d91a845c35ca2d925bcb8e21ac94 | [
"MIT"
] | null | null | null | ros/src/tl_detector/tl_detector.py | yufeiliubrown/carnd_clothoid | 0e6c008ca7f4d91a845c35ca2d925bcb8e21ac94 | [
"MIT"
] | 5 | 2018-07-15T22:21:06.000Z | 2018-08-10T11:49:39.000Z | #!/usr/bin/env python
import rospy
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, Pose
from styx_msgs.msg import TrafficLightArray, TrafficLight
from styx_msgs.msg import Lane
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
from light_classification.tl_classifier import TLClassifier
import tf
import cv2
import yaml
from scipy.spatial import KDTree
STATE_COUNT_THRESHOLD = 3
class TLDetector(object):
def __init__(self):
rospy.init_node('tl_detector')
self.pose = None
self.waypoints = None
self.camera_image = None
self.lights = []
self.waypoints_2d = None
self.waypoint_tree = None
self.is_classifier = True
sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
'''
/vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
helps you acquire an accurate ground truth data source for the traffic light
classifier by sending the current color state of all traffic lights in the
simulator. When testing on the vehicle, the color state will not be available. You'll need to
rely on the position of the light and the camera image to predict it.
'''
sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray, self.traffic_cb)
sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)
config_string = rospy.get_param("/traffic_light_config")
self.config = yaml.load(config_string)
self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint', Int32, queue_size=1)
self.bridge = CvBridge()
self.is_site = self.config["is_site"]
if(self.is_classifier):
self.light_classifier = TLClassifier(self.is_site)
rospy.loginfo('classifer initialized')
self.listener = tf.TransformListener()
self.state = TrafficLight.UNKNOWN
self.last_state = TrafficLight.UNKNOWN
self.last_wp = -1
self.state_count = 0
rospy.spin()
def pose_cb(self, msg):
self.pose = msg
def waypoints_cb(self, waypoints):
self.waypoints = waypoints
if not self.waypoints_2d:
self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]
self.waypoint_tree = KDTree(self.waypoints_2d)
def traffic_cb(self, msg):
self.lights = msg.lights
#rospy.loginfo("lights array", self.lights)
def image_cb(self, msg):
"""Identifies red lights in the incoming camera image and publishes the index
of the waypoint closest to the red light's stop line to /traffic_waypoint
Args:
msg (Image): image from car-mounted camera
"""
self.has_image = True
self.camera_image = msg
light_wp, state = self.process_traffic_lights()
'''
Publish upcoming red lights at camera frequency.
Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
of times till we start using it. Otherwise the previous stable state is
used.
'''
if self.state != state:
#rospy.loginfo('TL state', self.state)
self.state_count = 0
self.state = state
elif self.state_count >= STATE_COUNT_THRESHOLD:
self.last_state = self.state
light_wp = light_wp if state == TrafficLight.RED else -1
self.last_wp = light_wp
self.upcoming_red_light_pub.publish(Int32(light_wp))
else:
self.upcoming_red_light_pub.publish(Int32(self.last_wp))
self.state_count += 1
#rospy.loginfo('TL counter count:', self.state_count)
def get_closest_waypoint(self, pose):
"""Identifies the closest path waypoint to the given position
https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
Args:
pose (Pose): position to match a waypoint to
Returns:
int: index of the closest waypoint in self.waypoints
"""
#TODO implement
x = pose.position.x
y = pose.position.y
closest_idx = self.waypoint_tree.query([x,y],1)[1]
return closest_idx
def get_light_state(self, light):
"""Determines the current color of the traffic light
Args:
light (TrafficLight): light to classify
Returns:
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
# FIXME: This is only for simulator needs to be removed once the classifier code is integrator
# For Testing, just return the light state
if(self.is_classifier == False):
return light.state
else:
#FIXME: Uncomment following code once Classifier code is integrator
if(not self.has_image):
self.prev_light_loc = None
return False
cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "rgb8")
#Get classification
prediction = self.light_classifier.get_classification(cv_image)
#rospy.loginfo("light.state= %s, Prediction %s", light.state, self.light_classifier.get_classification(cv_image))
#rospy.loginfo("Light State Predicted %s", self.light_classifier.get_classification(cv_image))
return self.light_classifier.get_classification(cv_image)
def process_traffic_lights(self):
"""Finds closest visible traffic light, if one exists, and determines its
location and color
Returns:
int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
#light = None
closest_light = None
line_wp_idx = None
# List of positions that correspond to the line to stop in front of for a given intersection
stop_line_positions = self.config['stop_line_positions']
if(self.pose):
car_wp_idx = self.get_closest_waypoint(self.pose.pose)
#TODO find the closest visible traffic light (if one exists)
diff = len(self.waypoints.waypoints)
for i, light in enumerate(self.lights):
# Get stop line waypoint index
line = stop_line_positions[i]
stopline_pose = Pose()
stopline_pose.position.x = line[0]
stopline_pose.position.y = line[1]
temp_wp_idx = self.get_closest_waypoint(stopline_pose)
# Find closest Stop line waypoint index
d = temp_wp_idx - car_wp_idx
if d >= 0 and d < diff:
diff = d
closest_light = light
line_wp_idx = temp_wp_idx
if closest_light:
state = self.get_light_state(closest_light)
return line_wp_idx, state
#self.waypoints = None
return -1, TrafficLight.UNKNOWN
if __name__ == '__main__':
try:
TLDetector()
except rospy.ROSInterruptException:
rospy.logerr('Could not start traffic node.')
| 37.676768 | 132 | 0.635255 |
13404112a5df27176a197511b092089ff3b23817 | 7,652 | py | Python | src/filetypes/text_x_python.py | suhailvs/grailbrowser | 7e0ea8d501759626d4186b371933a2493bf7ef58 | [
"CNRI-Jython"
] | 8 | 2015-02-18T18:50:50.000Z | 2022-03-15T22:21:03.000Z | src/filetypes/text_x_python.py | suhailvs/grailbrowser | 7e0ea8d501759626d4186b371933a2493bf7ef58 | [
"CNRI-Jython"
] | null | null | null | src/filetypes/text_x_python.py | suhailvs/grailbrowser | 7e0ea8d501759626d4186b371933a2493bf7ef58 | [
"CNRI-Jython"
] | 3 | 2016-04-04T23:54:07.000Z | 2020-10-29T04:25:42.000Z | """<OBJECT> handler for Python applets."""
__version__ = '$Revision: 1.5 $'
import grailutil
import re
import string
import Tkinter
import token
import AppletLoader
import sgml.HTMLParser
whitespace = '\\t\\n\x0b\x0c\\r '
def embed_text_x_python(parser, attrs):
"""<OBJECT> Handler for Python applets."""
extract = grailutil.extract_keyword
width = extract('width', attrs, conv=string.atoi)
height = extract('height', attrs, conv=string.atoi)
menu = extract('menu', attrs, conv=string.strip)
classid = extract('classid', attrs, conv=string.strip)
codebase = extract('codebase', attrs, conv=string.strip)
align = extract('align', attrs, 'baseline')
vspace = extract('vspace', attrs, 0, conv=string.atoi)
hspace = extract('hspace', attrs, 0, conv=string.atoi)
apploader = AppletLoader.AppletLoader(
parser, width=width, height=height, menu=menu,
classid=classid, codebase=codebase,
vspace=vspace, hspace=hspace, align=align, reload=parser.reload1)
if apploader.feasible():
return AppletEmbedding(apploader)
else:
apploader.close()
return None
class AppletEmbedding(sgml.HTMLParser.Embedding):
"""Applet interface for use with <OBJECT> / <PARAM> elements."""
def __init__(self, apploader):
self.__apploader = apploader
def param(self, name, value):
self.__apploader.set_param(name, value)
def end(self):
self.__apploader.go_for_it()
ws_width = re.compile("[%s]*" % whitespace).match
class parse_text_x_python:
def __init__(self, viewer, reload=0):
self.__viewer = viewer
self.__source = ''
viewer.new_font((None, 0, 0, 1))
def feed(self, data):
self.__source = self.__source + data
self.__viewer.send_literal_data(data)
IGNORED_TERMINALS = (
token.ENDMARKER, token.NEWLINE, token.INDENT, token.DEDENT)
__wanted_terminals = {}
for ntype in token.tok_name.keys():
if token.ISTERMINAL(ntype) and ntype not in IGNORED_TERMINALS:
__wanted_terminals[ntype] = ntype
def close(self):
self.show("Colorizing Python source text - parsing...")
import parser
try:
nodes = parser.ast2list(parser.suite(self.__source), 1)
except parser.ParserError, err:
self.__viewer.context.message(
"Syntax error in Python source: %s" % err)
return
self.setup_tags()
from types import IntType, ListType
ISTERMINAL = token.ISTERMINAL
wanted = self.__wanted_terminals.has_key
tag_add = self.tag_add = self.__viewer.text.tag_add
colorize = self.colorize
prevline, prevcol = 0, 0
sourcetext = string.split(self.__source, "\n")
sourcetext.insert(0, '')
self.show("Colorizing Python source text - coloring...")
steps = 0
while nodes:
steps = steps + 1
if not (steps % 2000): self.show()
node = nodes[0]
del nodes[0]
if type(node) is ListType:
ntype = node[0]
if wanted(ntype):
[ntype, nstr, lineno] = node
# The parser spits out the line number the token ENDS on,
# not the line it starts on!
if ntype == token.STRING and "\n" in nstr:
strlines = string.split(nstr, "\n")
endpos = lineno, len(strlines[-1]), sourcetext[lineno]
lineno = lineno - len(strlines) + 1
else:
endpos = ()
if prevline != lineno:
tag_add('python:comment',
"%d.%d" % (prevline, prevcol), "%d.0" % lineno)
prevcol = 0
prevline = lineno
sourceline = sourcetext[lineno]
match = ws_width(sourceline, prevcol)
if match:
prevcol = match.end()
colorize(ntype, nstr, lineno, prevcol)
# point prevline/prevcol to 1st char after token:
if endpos:
prevline, prevcol, sourceline = endpos
else:
prevcol = prevcol + len(nstr)
else:
nodes = node[1:] + nodes
# end of last token to EOF is a comment...
start = "%d.%d" % (prevline or 1, prevcol)
tag_add('python:comment', start, Tkinter.END)
self.__viewer.context.message_clear()
self.tag_add = None
def show(self, message=None):
if message:
self.__viewer.context.message(message)
self.__viewer.context.browser.root.update_idletasks()
# Each element in this table maps an identifier to a tuple of
# the tag it should be marked with and the tag the next token
# should be marked with (or None).
#
__keywords = {
# real keywords
'and': ('python:operator', None),
'break': ('python:control', None),
'class': ('python:define', 'python:class'),
'continue': ('python:control', None),
'def': ('python:define', 'python:def'),
'del': ('python:statement', None),
'elif': ('python:control', None),
'else': ('python:control', None),
'except': ('python:control', None),
'finally': ('python:control', None),
'for': ('python:control', None),
'from': ('python:statement', None),
'global': ('python:statement', None),
'if': ('python:control', None),
'import': ('python:statement', None),
'in': ('python:operator', None),
'is': ('python:operator', None),
'lambda': ('python:operator', None),
'not': ('python:operator', None),
'or': ('python:operator', None),
'pass': ('python:statement', None),
'print': ('python:statement', None),
'raise': ('python:control', None),
'return': ('python:control', None),
'try': ('python:control', None),
'while': ('python:control', None),
# others I'd like made special
'None': ('python:special', None),
}
import types
for name in dir(types):
if len(name) > 4 and name[-4:] == "Type":
__keywords[name] = ('python:special', None)
__next_tag = None
def colorize(self, ntype, nstr, lineno, colno):
"""Colorize a single token.
ntype
Node type. This is guaranteed to be a terminal token type
not listed in self.IGNORE_TERMINALS.
nstr
String containing the token, uninterpreted.
lineno
Line number (1-based) of the line on which the token starts.
colno
Index into the source line at which the token starts. <TAB>s
are not counted specially.
"""
start = "%d.%d" % (lineno, colno)
end = "%s + %d chars" % (start, len(nstr))
if self.__next_tag:
self.tag_add(self.__next_tag, start, end)
self.__next_tag = None
elif self.__keywords.has_key(nstr):
tag, self.__next_tag = self.__keywords[nstr]
self.tag_add(tag, start, end)
elif ntype == token.STRING:
qw = 1 # number of leading/trailing quotation
if nstr[0] == nstr[1]: # marks -- `quote width'
qw = 3
start = "%d.%d" % (lineno, colno + qw)
end = "%s + %d chars" % (start, len(nstr) - (2 * qw))
self.tag_add("python:string", start, end)
# Set foreground colors from this tag==>color table:
__foregrounds = {
'python:class': 'darkGreen',
'python:comment': 'mediumBlue',
'python:control': 'midnightBlue',
'python:def': 'saddleBrown',
'python:define': 'midnightBlue',
'python:operator': 'midnightBlue',
'python:special': 'darkGreen',
'python:statement': 'midnightBlue',
'python:string': 'steelblue4',
}
def setup_tags(self):
"""Configure the display tags associated with Python source coloring.
This is called only if the source is correctly parsed. All mapping
of logical tags to physical style is accomplished in this method.
"""
self.__viewer.configure_fonttag('_tt_b')
self.__viewer.configure_fonttag('_tt_i')
text = self.__viewer.text
boldfont = text.tag_cget('_tt_b', '-font')
italicfont = text.tag_cget('_tt_i', '-font')
text.tag_config('python:string', font=italicfont)
for tag in ('python:class', 'python:def', 'python:define'):
text.tag_config(tag, font=boldfont)
for tag, color in self.__foregrounds.items():
text.tag_config(tag, foreground=color)
| 31.751037 | 73 | 0.660481 |
15810fb8e0a741229da911333397c1b706d02ac1 | 143 | py | Python | test/test_doctest.py | WRY-learning/k3awssign | f25b42886e06e668b037058d01ae43a13be7ee70 | [
"MIT"
] | null | null | null | test/test_doctest.py | WRY-learning/k3awssign | f25b42886e06e668b037058d01ae43a13be7ee70 | [
"MIT"
] | 2 | 2021-11-10T18:17:06.000Z | 2022-03-23T06:48:42.000Z | test/test_doctest.py | WRY-learning/k3awssign | f25b42886e06e668b037058d01ae43a13be7ee70 | [
"MIT"
] | 2 | 2021-08-09T04:16:30.000Z | 2021-08-23T09:45:31.000Z | import doctest
import k3awssign
def load_tests(loader, tests, ignore):
    """Hook for unittest discovery: add k3awssign's doctests to the suite."""
    doctest_suite = doctest.DocTestSuite(k3awssign)
    tests.addTests(doctest_suite)
    return tests
| 15.888889 | 51 | 0.769231 |
7260a5ecf9d435d6b3e370c8ef439589d34eb8a9 | 1,079 | py | Python | main.py | bluecough/AMP_AD_Hostname_check | ba76543ffa4cef87d2e834cab0bcbfc460298d5a | [
"BSD-Source-Code"
] | null | null | null | main.py | bluecough/AMP_AD_Hostname_check | ba76543ffa4cef87d2e834cab0bcbfc460298d5a | [
"BSD-Source-Code"
] | null | null | null | main.py | bluecough/AMP_AD_Hostname_check | ba76543ffa4cef87d2e834cab0bcbfc460298d5a | [
"BSD-Source-Code"
] | null | null | null | ## AMP AD Hostname Check
## Check AD and then call AMP to see if the hostname is also in AMP
## Authors: George Seeto, Chantel Strickland, Dave Schwartberg
##
import sys, requests, json, urllib3
def main():
    """Look up every hostname from a CSV file in Cisco AMP.

    Expects three CLI arguments: the AMP API client id, the API key and
    the path of a CSV file containing one hostname per line.
    """
    # Fail with a usage message instead of an IndexError traceback.
    if len(sys.argv) < 4:
        sys.exit("usage: main.py <client_id> <api_key> <csv_file>")
    client_id = sys.argv[1]
    api_key = sys.argv[2]
    csvfilename = sys.argv[3]

    urllib3.disable_warnings()

    host = "api.amp.cisco.com/v1"
    # Basic-auth credentials ride in the URL; the hostname to query is
    # appended by get_amp_host().
    url = f"https://{client_id}:{api_key}@{host}/computers?hostname[]="

    # One hostname per line; quotes are stripped so quoted CSV exports
    # ("host") and plain lists (host) both work.  The context manager
    # guarantees the file is closed (the original leaked the handle).
    with open(csvfilename, 'r') as csvfile:
        for line in csvfile:
            name = line.rstrip('\r\n').replace('"', '')
            get_amp_host(name, url)
def get_amp_host(name, url):
    """Query AMP for *name* and print its hostname and connector GUID.

    Prints a "not found" message when AMP returns no matching computer.
    """
    response = requests.get(url + name, verify=False)
    decode_response = response.json()
    try:
        record = decode_response['data'][0]
        print(record['hostname'] + " | " + record['connector_guid'])
    except (KeyError, IndexError):
        # Empty 'data' list (host unknown to AMP) or an unexpected
        # response shape.  The original bare `except` also swallowed
        # unrelated errors such as KeyboardInterrupt.
        print("name " + name + " not found")
# Script entry point.
if __name__ == '__main__':
    main()
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
| 26.975 | 108 | 0.622799 |
a919a2bbb25a1cb933180444bb55d93ce116cef9 | 7,581 | py | Python | test/sources/test_merge.py | ChowNow/blingalytics | a05c866bc0dc7c56b5106c71c12cf10b37c5bae5 | [
"MIT"
] | null | null | null | test/sources/test_merge.py | ChowNow/blingalytics | a05c866bc0dc7c56b5106c71c12cf10b37c5bae5 | [
"MIT"
] | 1 | 2019-08-16T07:15:42.000Z | 2019-08-16T15:27:18.000Z | test/sources/test_merge.py | ChowNow/blingalytics | a05c866bc0dc7c56b5106c71c12cf10b37c5bae5 | [
"MIT"
] | 1 | 2017-06-01T23:32:08.000Z | 2017-06-01T23:32:08.000Z | from __future__ import absolute_import
from decimal import Decimal
import unittest
from blingalytics import widgets
from blingalytics.sources import merge
from mock import Mock
from test import entities, reports
class TestMergeSource(unittest.TestCase):
def setUp(self):
cache = Mock()
cache.instance_rows.return_value = [
{'_bling_id': 1, 'user_id': 1, 'user_is_active': True, 'num_widgets': 2, '_sum_widget_price': Decimal('6.00'), 'average_widget_price': Decimal('3.00')},
{'_bling_id': 2, 'user_id': 2, 'user_is_active': False, 'num_widgets': 10, '_sum_widget_price': Decimal('100.00'), 'average_widget_price': Decimal('10.00')},
]
self.report = reports.BasicMergeReport(cache)
def test_merge_source(self):
id1, id2 = entities.Compare(), entities.Compare()
self.report.clean_user_inputs(include='1', user_is_active='0')
source = merge.MergeSource(self.report)
self.assertEqual(list(source.get_rows([], self.report.clean_inputs)), [
((id1,), {'double_num_widgets': 4, 'user_id': 1, 'user_is_active': True}),
((id2,), {'double_num_widgets': 20, 'user_id': 2, 'user_is_active': False}),
])
self.report.clean_user_inputs(include='', user_is_active='0')
self.assertEqual(list(source.get_rows([], self.report.clean_inputs)), [
((id1,), {'double_num_widgets': 2, 'user_id': 1, 'user_is_active': True}),
((id2,), {'double_num_widgets': 10, 'user_id': 2, 'user_is_active': False}),
])
def test_merge_columns(self):
# Test basic merge column functionality, and Sum functionality
col = merge.Sum() # Should merge any columns with the column name given (second arg)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 4)
self.assertEqual(col._merge_report_column('report1', 'col3', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), None)
self.assertEqual(col._merge_report_column('report23883832', 'col2', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 6)
self.assertEqual(col._merge_report_column('report1', 'col1', {}, {'col1': 3, 'col2': 4}), 3)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': Decimal('1.45'), 'col2': 2}, {'col1': Decimal('2.01'), 'col2': 4}), Decimal('3.46'))
self.assertRaises(TypeError, col._merge_report_column, 'report1', 'col1', {'col1': 'string', 'col2': 2}, {'col1': Decimal('1.5'), 'col2': 4})
col = merge.Sum('col1') # Should merge col1 columns regardless of the second arg
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 4)
self.assertEqual(col._merge_report_column('report1', 'col3', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 3)
self.assertEqual(col._merge_report_column('report23883832', 'col1', {'col1': 1, 'col2': 2}, {'col2': 4}), 1)
self.assertEqual(col._merge_report_column('report1', 'col2', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 5)
col = merge.Sum('report1.col1') # Should merge only col1 from report1
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 4)
self.assertEqual(col._merge_report_column('report1', 'col2', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 5)
self.assertEqual(col._merge_report_column('report2', 'col1', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 1)
self.assertEqual(col._merge_report_column('report2', 'col2', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 2)
col = merge.Sum('report1.col1', 'report2.col2') # Should merge col1 from report1, col2 from report2
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 4)
self.assertEqual(col._merge_report_column('report1', 'col2', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 5)
self.assertEqual(col._merge_report_column('report2', 'col1', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 5)
self.assertEqual(col._merge_report_column('report2', 'col2', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 6)
self.assertEqual(col._merge_report_column('report3', 'col3', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), None)
# First
col = merge.First()
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': 1, 'col2': 2}, {'col1': 3, 'col2': 4}), 1)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': None, 'col2': 2}, {'col1': 3, 'col2': 4}), 3)
self.assertEqual(col._merge_report_column('report1', 'col1', {}, {'col1': 3, 'col2': 4}), 3)
# BoolAnd
col = merge.BoolAnd()
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': None, 'col2': 2}, {'col1': True, 'col2': 4}), True)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': True, 'col2': 2}, {'col1': None, 'col2': 4}), True)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': None, 'col2': 2}, {'col1': False, 'col2': 4}), False)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': True, 'col2': 2}, {'col1': False, 'col2': 4}), False)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': False, 'col2': 2}, {'col1': False, 'col2': 4}), False)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': None, 'col2': 2}, {'col1': None, 'col2': 4}), True)
# BoolOr
col = merge.BoolOr()
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': None, 'col2': 2}, {'col1': True, 'col2': 4}), True)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': True, 'col2': 2}, {'col1': None, 'col2': 4}), True)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': None, 'col2': 2}, {'col1': False, 'col2': 4}), False)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': True, 'col2': 2}, {'col1': False, 'col2': 4}), True)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': False, 'col2': 2}, {'col1': False, 'col2': 4}), False)
self.assertEqual(col._merge_report_column('report1', 'col1', {'col1': None, 'col2': 2}, {'col1': None, 'col2': 4}), False)
def test_merge_filters(self):
# PostFilter
fil = merge.PostFilter(lambda row: row['include'] in ('yes', 'please'))
self.assertEqual(fil.include_row({'include': 'yes', 'value': 2}, {}), True)
self.assertEqual(fil.include_row({'include': 'maybe so', 'value': 2}, {}), False)
self.assertRaises(KeyError, fil.include_row, {'value': 1, 'othervalue': 2}, {})
widget = widgets.Select(choices=((True, 'Include'), (False, 'Disclude')))
widget._name = 'widget'
fil = merge.PostFilter(lambda row, user_input: user_input, widget=widget)
self.assertEqual(fil.include_row({'value': 1}, {'widget': widget.clean(1)}), False)
# ReportFilter
widget = widgets.Checkbox()
widget._name = 'widget'
fil = merge.ReportFilter('report1', widget=widget)
self.assertEqual(fil.excluded_reports({'widget': widget.clean(True)}), [])
self.assertEqual(fil.excluded_reports({'widget': widget.clean(False)}), ['report1'])
# DelegatedFilter
del1 = Mock()
del2 = Mock()
fil = merge.DelegatedFilter(del1)
| 70.850467 | 169 | 0.612848 |
a09af369db74fada3f95f3737fdb9622679bab47 | 994 | py | Python | example/archive.py | brucebentley/SlackCleaner | 43ef0d32090452394b1eb7591611f95332957f91 | [
"MIT"
] | 1 | 2021-05-13T00:37:22.000Z | 2021-05-13T00:37:22.000Z | example/archive.py | brucebentley/slack_cleaner2 | 43ef0d32090452394b1eb7591611f95332957f91 | [
"MIT"
] | null | null | null | example/archive.py | brucebentley/slack_cleaner2 | 43ef0d32090452394b1eb7591611f95332957f91 | [
"MIT"
] | null | null | null | import os
from SlackCleaner import SlackCleaner, a_while_ago, is_not_pinned
from pymongo import MongoClient
from pymongo.errors import DuplicateKeyError
# MongoDB is the archive store; archive() writes one collection per channel.
client = MongoClient()
# Slack session; the auth token is read from the environment.
s = SlackCleaner(os.environ['TOKEN'], 1)
db = client.slack_cleaner
# Only non-pinned messages older than two months are archived/deleted.
f = is_not_pinned()
before = a_while_ago(months=2)
def archive(c):
  """Copy all matching messages of channel *c* into MongoDB.

  A unique index on ``client_msg_id`` makes the copy idempotent:
  re-running counts already-stored messages as duplicates instead of
  inserting them twice.
  """
  # Renamed from `archive` -- the original local shadowed this function.
  collection = db[c.name]
  collection.create_index([('client_msg_id', 1)], unique=True)
  count = 0
  duplicates = 0
  for msg in filter(f, c.msgs(before=before)):
    # Some messages lack a client_msg_id; fall back to the timestamp,
    # which is unique within a channel.
    if 'client_msg_id' not in msg.json:
      msg.json['client_msg_id'] = str(msg.json['ts'])
    try:
      collection.insert_one(msg.json)
      count += 1
    except DuplicateKeyError:
      # Already archived on a previous run.
      duplicates += 1
  s.log.info('%s archived %d, duplicates: %d', c.name, count, duplicates)
def delete(c):
  """Delete every message in *c* matching the age/pinned filter."""
  with s.log.group(c.name):
    for candidate in c.msgs(before=before):
      if f(candidate):
        candidate.delete()
# Channels and private groups are copied into MongoDB.
for c in s.channels:
    archive(c)
for c in s.groups:
    archive(c)
# NOTE(review): conversations are deleted without being archived first,
# unlike channels/groups -- confirm this asymmetry is intentional.
for c in s.conversations:
    delete(c)
| 22.088889 | 73 | 0.68008 |
e96a4c0de8d58c49ee4ef2637ae1916072943339 | 32,139 | py | Python | pymc3/tests/test_step.py | nokados/pymc3 | abed82345e2932b3a77fcc67894f7eb6bae8797c | [
"Apache-2.0"
] | null | null | null | pymc3/tests/test_step.py | nokados/pymc3 | abed82345e2932b3a77fcc67894f7eb6bae8797c | [
"Apache-2.0"
] | null | null | null | pymc3/tests/test_step.py | nokados/pymc3 | abed82345e2932b3a77fcc67894f7eb6bae8797c | [
"Apache-2.0"
] | null | null | null | import shutil
import tempfile
import sys
from .checks import close_to
from .models import (
simple_categorical,
mv_simple,
mv_simple_discrete,
mv_prior_simple,
simple_2model_continuous,
)
from pymc3.sampling import assign_step_methods, sample
from pymc3.parallel_sampling import ParallelSamplingError
from pymc3.exceptions import SamplingError
from pymc3.model import Model
from pymc3.step_methods import (
NUTS,
BinaryGibbsMetropolis,
CategoricalGibbsMetropolis,
Metropolis,
Slice,
CompoundStep,
NormalProposal,
MultivariateNormalProposal,
HamiltonianMC,
EllipticalSlice,
SMC,
DEMetropolis,
)
from pymc3.theanof import floatX
from pymc3.distributions import Binomial, Normal, Bernoulli, Categorical, Beta, HalfNormal
from numpy.testing import assert_array_almost_equal
import numpy as np
import numpy.testing as npt
import pytest
import theano
import theano.tensor as tt
from .helpers import select_by_precision
class TestStepMethods: # yield test doesn't work subclassing object
master_samples = {
Slice: np.array(
[
0.10233528,
0.40458486,
0.17329217,
0.46281232,
0.22556278,
1.52632836,
-0.27823807,
0.02539625,
1.02711735,
0.03686346,
-0.62841281,
-0.27125083,
0.31989505,
0.84031155,
-0.18949138,
1.60550262,
1.01375291,
-0.29742941,
0.35312738,
0.43363622,
1.18898078,
0.80063888,
0.38445644,
0.90184395,
1.69150017,
2.05452171,
-0.13334755,
1.61265408,
1.36579345,
1.3216292,
-0.59487037,
-0.34648927,
1.05107285,
0.42870305,
0.61552257,
0.55239884,
0.13929271,
0.26213809,
-0.2316028,
0.19711046,
1.42832629,
1.93641434,
-0.81142379,
-0.31059485,
-0.3189694,
1.43542534,
0.40311093,
1.63103768,
0.24034874,
0.33924866,
0.94951616,
0.71700185,
0.79273056,
-0.44569146,
1.91974783,
0.84673795,
1.12411833,
-0.83123811,
-0.54310095,
-0.00721347,
0.9925055,
1.04015058,
-0.34958074,
-0.14926302,
-0.47990225,
-0.75629446,
-0.95942067,
1.68179204,
1.20598073,
1.39675733,
1.22755935,
0.06728757,
1.05184231,
1.01126791,
-0.67327093,
0.21429651,
1.33730461,
-1.56174184,
-0.64348764,
0.98050636,
0.25923049,
0.58622631,
0.46589069,
1.44367347,
-0.43141573,
1.08293374,
-0.5563204,
1.46287904,
1.26019815,
0.52972104,
1.08792687,
1.10064358,
1.84881549,
0.91179647,
0.69316592,
-0.47657064,
2.22747063,
0.83388935,
0.84680716,
-0.10556406,
]
),
HamiltonianMC: np.array(
[
1.43583525,
1.43583525,
1.43583525,
-0.57415005,
0.91472062,
0.91472062,
0.36282799,
0.80991631,
0.84457253,
0.84457253,
-0.12651784,
-0.12651784,
0.39027088,
-0.22998424,
0.64337475,
0.64337475,
0.03504003,
1.2667789,
1.2667789,
0.34770874,
0.224319,
0.224319,
1.00416894,
0.46161403,
0.28217305,
0.28217305,
0.50327811,
0.50327811,
0.50327811,
0.50327811,
0.42335724,
0.42335724,
0.20336198,
0.20336198,
0.20336198,
0.16330229,
0.16330229,
-0.7332075,
1.04924226,
1.04924226,
0.39630439,
0.16481719,
0.16481719,
0.84146061,
0.83146709,
0.83146709,
0.32748059,
1.00918804,
1.00918804,
0.91034823,
1.31278027,
1.38222654,
1.38222654,
-0.32268814,
-0.32268814,
2.1866116,
1.21679252,
-0.15916878,
-0.15916878,
0.38958249,
0.38958249,
0.54971928,
0.05591406,
0.87712017,
0.87712017,
0.19409043,
0.19409043,
0.19409043,
0.40718849,
0.63399349,
0.35510353,
0.35510353,
0.47860847,
0.47860847,
0.69805772,
0.16686305,
0.16686305,
0.16686305,
0.04971251,
0.04971251,
-0.90052793,
-0.73203754,
1.02258958,
1.02258958,
-0.14144856,
-0.14144856,
1.43017486,
1.23202605,
1.23202605,
0.24442885,
0.78300516,
0.30494261,
0.30494261,
0.30494261,
-0.00596443,
1.31695235,
0.81375848,
0.81375848,
0.81375848,
1.91238675,
]
),
Metropolis: np.array(
[
1.62434536,
1.01258895,
0.4844172,
0.4844172,
0.4844172,
0.4844172,
0.4844172,
0.4844172,
0.4844172,
0.4844172,
0.31198899,
0.31198899,
0.31198899,
0.31198899,
1.21284494,
0.52911708,
0.261229,
0.79158447,
0.10441177,
-0.74079387,
-0.74079387,
-0.50637818,
-0.50637818,
-0.50637818,
-0.45557042,
-0.45557042,
-0.33541147,
0.28179164,
0.58196196,
0.22971211,
0.02081788,
0.60744107,
0.8930284,
0.8930284,
1.40595822,
1.10786538,
1.10786538,
1.10786538,
1.10786538,
-0.28863095,
-0.12859388,
0.74757504,
0.74757504,
0.74757504,
0.97766977,
0.97766977,
0.75534163,
0.55458356,
0.75288328,
0.87189193,
0.9937132,
0.9937132,
0.61842825,
0.61842825,
0.27457457,
0.31817143,
0.31817143,
0.31817143,
-0.77674042,
-0.60735798,
0.13319847,
-0.82050213,
-0.82050213,
-0.50534274,
-0.15479676,
-0.15479676,
-0.19349227,
-0.19349227,
-0.21810923,
-0.21810923,
-0.21810923,
1.0180548,
-0.18121323,
0.68213209,
0.68213209,
1.23266958,
1.23266958,
0.60913885,
1.41099989,
1.45756718,
1.45756718,
1.45756718,
1.45756718,
1.59526839,
1.82776295,
1.82776295,
1.82776295,
1.82776295,
2.2691274,
2.16897216,
2.18638157,
1.06436284,
0.54726838,
0.54726838,
1.04247971,
0.86777655,
0.86777655,
0.86777655,
0.86777655,
0.61914177,
]
),
NUTS: np.array(
[
0.550575,
0.550575,
0.80046332,
0.91590059,
1.34621916,
1.34621916,
-0.63917773,
-0.65770809,
-0.65770809,
-0.64512868,
-1.05448153,
-0.5225666,
0.14335153,
-0.0034499,
-0.0034499,
0.05309212,
-0.53186371,
0.29325825,
0.43210854,
0.56284837,
0.56284837,
0.38041767,
0.47322034,
0.49937368,
0.49937368,
0.44424258,
0.44424258,
-0.02790848,
-0.40470145,
-0.35725567,
-0.43744228,
0.41955432,
0.31099421,
0.31099421,
0.65811717,
0.66649398,
0.38493786,
0.54114658,
0.54114658,
0.68222408,
0.66404942,
1.44143108,
1.15638799,
-0.06775775,
-0.06775775,
0.30418561,
0.23543403,
0.57934404,
-0.5435111,
-0.47938915,
-0.23816662,
0.36793792,
0.36793792,
0.64980016,
0.52150456,
0.64643321,
0.26130179,
1.10569077,
1.10569077,
1.23662797,
-0.36928735,
-0.14303069,
0.85298904,
0.85298904,
0.31422085,
0.32113762,
0.32113762,
1.0692238,
1.0692238,
1.60127576,
1.49249738,
1.09065107,
0.84264371,
0.84264371,
-0.08832343,
0.04868027,
-0.02679449,
-0.02679449,
0.91989101,
0.65754478,
-0.39220625,
0.08379492,
1.03055634,
1.03055634,
1.71071332,
1.58740483,
1.67905741,
0.77744868,
0.15050587,
0.15050587,
0.73979127,
0.15445515,
0.13134717,
0.85068974,
0.85068974,
0.6974799,
0.16170472,
0.86405959,
0.86405959,
-0.22032854,
]
),
SMC: np.array(
[
0.85565848,
-0.2070422,
0.60432617,
0.82409693,
0.66956559,
1.81128223,
0.5099755,
0.0119065,
0.11877237,
1.04616407,
0.35541975,
0.97711646,
1.08273746,
0.12254112,
-0.21257513,
1.90683915,
0.76584417,
1.61601906,
1.26496997,
0.72605814,
0.27710155,
0.59465936,
1.48848202,
1.48383457,
0.85487729,
0.40339297,
1.11378062,
-0.01154052,
-0.24933346,
0.04855092,
0.44408811,
1.07009768,
0.71832534,
-0.02224531,
0.15732427,
0.7473228,
-0.55976844,
1.83476852,
1.13464918,
1.04477006,
-0.8829072,
0.68610441,
-0.51600679,
1.06577287,
0.72533541,
0.26181682,
0.37045784,
0.49110896,
0.95187099,
0.57052884,
1.18390954,
-0.28471075,
0.51430074,
0.36340121,
0.26524266,
0.91352896,
-0.16906962,
0.02671763,
-0.62019011,
0.13845477,
0.69578153,
0.82213032,
0.95565471,
0.57200968,
0.66751333,
0.74663059,
-0.18802928,
-0.16424154,
0.67661238,
0.9861513,
1.11037445,
0.53367436,
0.81646116,
0.690932,
1.30967756,
0.58455721,
-0.10754287,
-0.6684397,
0.61473599,
0.11205459,
1.50795626,
1.61304945,
0.97329075,
0.80782601,
1.83144756,
0.34256431,
0.4909023,
1.85297991,
0.44832968,
1.35766865,
0.48916414,
0.41003811,
-0.69870992,
0.06616797,
-0.17685457,
-0.04873934,
1.92862499,
0.47539711,
1.19401841,
0.36708951,
2.11504567,
1.1686311,
0.74908099,
0.90147251,
0.6291452,
0.96889866,
0.93871978,
0.74575847,
0.06810142,
0.45469276,
0.2978768,
0.73557954,
-0.33888277,
-0.09913398,
1.12325616,
0.87397745,
-1.14737571,
-0.78658184,
0.67716005,
0.20961373,
0.11759896,
0.72748602,
-0.29959812,
-0.09436507,
0.42100139,
0.0465658,
1.21211627,
0.0406079,
1.38031654,
0.58429982,
0.33843332,
0.82207419,
0.9650973,
1.00370894,
1.23735049,
-0.01960991,
0.77210838,
0.04627416,
-0.62058637,
0.21093913,
-0.15935478,
0.83237714,
0.10157911,
-0.45885337,
1.26207038,
1.07601429,
1.23736173,
0.28618205,
-0.143281,
-0.13159008,
0.74308471,
0.26291269,
0.17504574,
0.55601508,
1.46900656,
0.65130981,
0.89596543,
0.32536767,
-0.25504632,
0.07563599,
1.48775644,
0.28519708,
0.58513646,
-0.63673033,
1.5932429,
0.53826754,
0.41792748,
0.7658319,
0.87290603,
0.89110888,
0.27282434,
-0.20300504,
1.01058742,
0.68072965,
-0.21073937,
1.19114243,
0.63723316,
0.3344412,
1.05599174,
0.78372725,
1.01272241,
-0.19460072,
1.3180811,
0.58658171,
-0.34218688,
0.68725498,
0.37484577,
2.48875469,
-0.06424035,
0.22162324,
-0.21623218,
0.25998442,
0.37801781,
-0.51312723,
-0.35024653,
1.90461235,
0.02214488,
-0.59132457,
0.42870476,
0.88951825,
]
),
}
    def setup_class(self):
        # Scratch directory for the whole class; removed in teardown_class.
        self.temp_dir = tempfile.mkdtemp()
    def teardown_class(self):
        # Remove the scratch directory created in setup_class.
        shutil.rmtree(self.temp_dir)
    @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
    def test_sample_exact(self):
        """Every step method must reproduce the pinned master_samples trace."""
        for step_method in self.master_samples:
            self.check_trace(step_method)
    def check_trace(self, step_method):
        """Tests whether the trace for step methods is exactly the same as on master.

        Code changes that affect how random numbers are drawn may change this, and require
        `master_samples` to be updated, but such changes should be noted and justified in the
        commit.

        This method may also be used to benchmark step methods across commits, by running, for
        example

        ```
        BENCHMARK=100000 ./scripts/test.sh -s pymc3/tests/test_step.py:TestStepMethods
        ```

        on multiple commits.
        """
        n_steps = 100
        with Model() as model:
            x = Normal("x", mu=0, sigma=1)
            y = Normal("y", mu=x, sigma=1, observed=1)
            # SMC draws its own samples; NUTS needs an explicit scaling point;
            # everything else runs a plain tuning-only sample.
            if step_method.__name__ == "SMC":
                trace = sample(
                    draws=200, random_seed=1, progressbar=False, step=step_method(parallel=False)
                )
            elif step_method.__name__ == "NUTS":
                step = step_method(scaling=model.test_point)
                trace = sample(
                    0, tune=n_steps, discard_tuned_samples=False, step=step, random_seed=1, chains=1
                )
            else:
                trace = sample(
                    0,
                    tune=n_steps,
                    discard_tuned_samples=False,
                    step=step_method(),
                    random_seed=1,
                    chains=1,
                )

            assert_array_almost_equal(
                trace["x"],
                self.master_samples[step_method],
                decimal=select_by_precision(float64=6, float32=4),
            )
def check_stat(self, check, trace, name):
for (var, stat, value, bound) in check:
s = stat(trace[var][2000:], axis=0)
close_to(s, value, bound)
    def test_step_continuous(self):
        """All continuous samplers recover the known mean/std of mv_simple."""
        start, model, (mu, C) = mv_simple()
        unc = np.diag(C) ** 0.5
        # Mean and std must land within 10% of the marginal uncertainty.
        check = (("x", np.mean, mu, unc / 10.0), ("x", np.std, unc, unc / 10.0))
        with model:
            steps = (
                Slice(),
                HamiltonianMC(scaling=C, is_cov=True, blocked=False),
                NUTS(scaling=C, is_cov=True, blocked=False),
                Metropolis(S=C, proposal_dist=MultivariateNormalProposal, blocked=True),
                Slice(blocked=True),
                HamiltonianMC(scaling=C, is_cov=True),
                NUTS(scaling=C, is_cov=True),
                CompoundStep(
                    [
                        HamiltonianMC(scaling=C, is_cov=True),
                        HamiltonianMC(scaling=C, is_cov=True, blocked=False),
                    ]
                ),
            )
        for step in steps:
            trace = sample(
                0,
                tune=8000,
                chains=1,
                discard_tuned_samples=False,
                step=step,
                start=start,
                model=model,
                random_seed=1,
            )
            self.check_stat(check, trace, step.__class__.__name__)
    def test_step_discrete(self):
        """Metropolis with an MVN proposal recovers mv_simple_discrete's moments."""
        if theano.config.floatX == "float32":
            return  # Cannot use @skip because it only skips one iteration of the yield
        start, model, (mu, C) = mv_simple_discrete()
        unc = np.diag(C) ** 0.5
        check = (("x", np.mean, mu, unc / 10.0), ("x", np.std, unc, unc / 10.0))
        with model:
            steps = (Metropolis(S=C, proposal_dist=MultivariateNormalProposal),)
        for step in steps:
            trace = sample(
                20000, tune=0, step=step, start=start, model=model, random_seed=1, chains=1
            )
            self.check_stat(check, trace, step.__class__.__name__)
    def test_step_categorical(self):
        """Both CategoricalGibbsMetropolis proposals recover the categorical moments."""
        start, model, (mu, C) = simple_categorical()
        unc = C ** 0.5
        check = (("x", np.mean, mu, unc / 10.0), ("x", np.std, unc, unc / 10.0))
        with model:
            steps = (
                CategoricalGibbsMetropolis(model.x, proposal="uniform"),
                CategoricalGibbsMetropolis(model.x, proposal="proportional"),
            )
        for step in steps:
            trace = sample(8000, tune=0, step=step, start=start, model=model, random_seed=1)
            self.check_stat(check, trace, step.__class__.__name__)
    def test_step_elliptical_slice(self):
        """EllipticalSlice works with either a prior covariance or its Cholesky factor."""
        start, model, (K, L, mu, std, noise) = mv_prior_simple()
        unc = noise ** 0.5
        check = (("x", np.mean, mu, unc / 10.0), ("x", np.std, std, unc / 10.0))
        with model:
            steps = (EllipticalSlice(prior_cov=K), EllipticalSlice(prior_chol=L))
        for step in steps:
            trace = sample(
                5000, tune=0, step=step, start=start, model=model, random_seed=1, chains=1
            )
            self.check_stat(check, trace, step.__class__.__name__)
class TestMetropolisProposal:
    """Checks for how Metropolis picks and uses its proposal distribution."""

    def test_proposal_choice(self):
        """Vector S -> NormalProposal; matrix S -> MultivariateNormalProposal."""
        _, model, _ = mv_simple()
        with model:
            s = np.ones(model.ndim)
            sampler = Metropolis(S=s)
            assert isinstance(sampler.proposal_dist, NormalProposal)
            s = np.diag(s)
            sampler = Metropolis(S=s)
            assert isinstance(sampler.proposal_dist, MultivariateNormalProposal)
            # A negative diagonal entry makes S non-positive-definite, so the
            # Cholesky factorization inside the proposal must fail.
            s[0, 0] = -s[0, 0]
            with pytest.raises(np.linalg.LinAlgError):
                sampler = Metropolis(S=s)

    def test_mv_proposal(self):
        """Empirical covariance of MVN proposal draws matches the requested cov."""
        np.random.seed(42)
        cov = np.random.randn(5, 5)
        cov = cov.dot(cov.T)
        prop = MultivariateNormalProposal(cov)
        samples = np.array([prop() for _ in range(10000)])
        npt.assert_allclose(np.cov(samples.T), cov, rtol=0.2)
class TestCompoundStep:
    """blocked=False must yield a CompoundStep; blocked=True the plain sampler."""

    samplers = (Metropolis, Slice, HamiltonianMC, NUTS, DEMetropolis)

    @pytest.mark.skipif(
        theano.config.floatX == "float32", reason="Test fails on 32 bit due to linalg issues"
    )
    def test_non_blocked(self):
        """Test that samplers correctly create non-blocked compound steps."""
        _, model = simple_2model_continuous()
        with model:
            for sampler in self.samplers:
                assert isinstance(sampler(blocked=False), CompoundStep)

    @pytest.mark.skipif(
        theano.config.floatX == "float32", reason="Test fails on 32 bit due to linalg issues"
    )
    def test_blocked(self):
        """Blocked samplers are returned as-is, not wrapped in CompoundStep."""
        _, model = simple_2model_continuous()
        with model:
            for sampler in self.samplers:
                sampler_instance = sampler(blocked=True)
                assert not isinstance(sampler_instance, CompoundStep)
                assert isinstance(sampler_instance, sampler)
class TestAssignStepMethods:
    """assign_step_methods must pick the right sampler per distribution type."""

    def test_bernoulli(self):
        """Test bernoulli distribution is assigned binary gibbs metropolis method"""
        with Model() as model:
            Bernoulli("x", 0.5)
            steps = assign_step_methods(model, [])
        assert isinstance(steps, BinaryGibbsMetropolis)

    def test_normal(self):
        """Test normal distribution is assigned NUTS method"""
        with Model() as model:
            Normal("x", 0, 1)
            steps = assign_step_methods(model, [])
        assert isinstance(steps, NUTS)

    def test_categorical(self):
        """Test categorical distribution is assigned categorical gibbs metropolis method"""
        # Two categories reduce to a binary variable, hence BinaryGibbsMetropolis.
        with Model() as model:
            Categorical("x", np.array([0.25, 0.75]))
            steps = assign_step_methods(model, [])
        assert isinstance(steps, BinaryGibbsMetropolis)
        # Three or more categories get the general categorical sampler.
        with Model() as model:
            Categorical("y", np.array([0.25, 0.70, 0.05]))
            steps = assign_step_methods(model, [])
        assert isinstance(steps, CategoricalGibbsMetropolis)

    def test_binomial(self):
        """Test binomial distribution is assigned metropolis method."""
        with Model() as model:
            Binomial("x", 10, 0.5)
            steps = assign_step_methods(model, [])
        assert isinstance(steps, Metropolis)

    def test_normal_nograd_op(self):
        """Test normal distribution without an implemented gradient is assigned slice method"""
        with Model() as model:
            x = Normal("x", 0, 1)

            # a custom Theano Op that does not have a grad:
            is_64 = theano.config.floatX == "float64"
            itypes = [tt.dscalar] if is_64 else [tt.fscalar]
            otypes = [tt.dscalar] if is_64 else [tt.fscalar]

            @theano.as_op(itypes, otypes)
            def kill_grad(x):
                return x

            data = np.random.normal(size=(100,))
            Normal("y", mu=kill_grad(x), sigma=1, observed=data.astype(theano.config.floatX))

            steps = assign_step_methods(model, [])
        assert isinstance(steps, Slice)
class TestPopulationSamplers:
    """Sanity checks for population-based samplers (currently DEMetropolis)."""

    steppers = [DEMetropolis]

    def test_checks_population_size(self):
        """Population samplers must reject chain counts below the population size."""
        with Model():
            Normal("n", mu=0, sigma=1)
            for stepper in self.steppers:
                step = stepper()
                with pytest.raises(ValueError):
                    sample(draws=100, chains=1, step=step)
                # With enough chains the same sampler must succeed.
                sample(draws=100, chains=4, step=step)

    def test_nonparallelized_chains_are_random(self):
        """Serially-run chains must not produce identical draws."""
        with Model():
            Normal("x", 0, 1)
            for stepper in self.steppers:
                # Fix: the loop previously passed a fresh DEMetropolis() to
                # sample() instead of the instance built from `stepper`, so
                # only DEMetropolis was ever exercised regardless of the list.
                step = stepper()
                trace = sample(chains=4, cores=1, draws=20, tune=0, step=step)
                samples = np.array(trace.get_values("x", combine=False))[:, 5]
                assert len(set(samples)) == 4, "Parallelized {} chains are identical.".format(
                    stepper
                )

    def test_parallelized_chains_are_random(self):
        """Chains run in parallel processes must not produce identical draws."""
        with Model():
            Normal("x", 0, 1)
            for stepper in self.steppers:
                step = stepper()
                trace = sample(chains=4, cores=4, draws=20, tune=0, step=step)
                samples = np.array(trace.get_values("x", combine=False))[:, 5]
                assert len(set(samples)) == 4, "Parallelized {} chains are identical.".format(
                    stepper
                )
@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
class TestNutsCheckTrace:
    """End-to-end NUTS runs: warnings, bad initial points, divergences, stats."""

    def test_multiple_samplers(self, caplog):
        """Mixed continuous/discrete model must not log boolean-index warnings."""
        with Model():
            prob = Beta("prob", alpha=5.0, beta=3.0)
            Binomial("outcome", n=1, p=prob)
            caplog.clear()
            sample(3, tune=2, discard_tuned_samples=False, n_init=None, chains=1)
            messages = [msg.msg for msg in caplog.records]
            assert all("boolean index did not" not in msg for msg in messages)

    def test_bad_init_nonparallel(self):
        """An infeasible test value (negative HalfNormal) must raise SamplingError."""
        with Model():
            HalfNormal("a", sigma=1, testval=-1, transform=None)
            with pytest.raises(SamplingError) as error:
                sample(init=None, chains=1, random_seed=1)
            error.match("Bad initial")

    @pytest.mark.skipif(sys.version_info < (3, 6), reason="requires python3.6 or higher")
    def test_bad_init_parallel(self):
        """Same bad-initial-point failure, surfaced through the parallel sampler."""
        with Model():
            HalfNormal("a", sigma=1, testval=-1, transform=None)
            with pytest.raises(ParallelSamplingError) as error:
                sample(init=None, cores=2, random_seed=1)
            error.match("Bad initial")

    def test_linalg(self, caplog):
        """A model forced to diverge must flag divergences and fail raise_ok()."""
        with Model():
            # tt.switch injects inf values so the linear solve misbehaves.
            a = Normal("a", shape=2)
            a = tt.switch(a > 0, np.inf, a)
            b = tt.slinalg.solve(floatX(np.eye(2)), a)
            Normal("c", mu=b, shape=2)
            caplog.clear()
            trace = sample(20, init=None, tune=5, chains=2)
            warns = [msg.msg for msg in caplog.records]
            assert np.any(trace["diverging"])
            assert (
                any("divergence after tuning" in warn for warn in warns)
                or any("divergences after tuning" in warn for warn in warns)
                or any("only diverging samples" in warn for warn in warns)
            )

            with pytest.raises(ValueError) as error:
                trace.report.raise_ok()
            error.match("issues during sampling")

            assert not trace.report.ok

    def test_sampler_stats(self):
        """The NUTS trace exposes the expected sampler stats with correct shapes."""
        with Model() as model:
            x = Normal("x", mu=0, sigma=1)
            trace = sample(draws=10, tune=1, chains=1)

        # Assert stats exist and have the correct shape.
        expected_stat_names = {
            "depth",
            "diverging",
            "energy",
            "energy_error",
            "model_logp",
            "max_energy_error",
            "mean_tree_accept",
            "step_size",
            "step_size_bar",
            "tree_size",
            "tune",
        }
        assert trace.stat_names == expected_stat_names
        for varname in trace.stat_names:
            assert trace.get_sampler_stats(varname).shape == (10,)

        # Assert model logp is computed correctly: computing post-sampling
        # and tracking while sampling should give same results.
        model_logp_ = np.array(
            [model.logp(trace.point(i, chain=c)) for c in trace.chains for i in range(len(trace))]
        )
        assert (trace.model_logp == model_logp_).all()
| 31.447162 | 100 | 0.438937 |
6a79d11c9fb366d951ce21a21e9a831ad862579d | 17,524 | py | Python | nvflare/private/fed/client/client_executor.py | Can-Zhao/NVFlare | 52d9dee9972b32d49d9a8add52a652ce3bb878b5 | [
"Apache-2.0"
] | 155 | 2021-08-05T18:05:09.000Z | 2022-03-27T15:32:56.000Z | nvflare/private/fed/client/client_executor.py | Can-Zhao/NVFlare | 52d9dee9972b32d49d9a8add52a652ce3bb878b5 | [
"Apache-2.0"
] | 216 | 2021-12-01T06:07:12.000Z | 2022-03-30T23:34:02.000Z | nvflare/private/fed/client/client_executor.py | Can-Zhao/NVFlare | 52d9dee9972b32d49d9a8add52a652ce3bb878b5 | [
"Apache-2.0"
] | 44 | 2021-11-24T16:03:29.000Z | 2022-03-24T23:28:39.000Z | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import math
import os
import shlex
import subprocess
import sys
import threading
import time
from multiprocessing.connection import Client
from nvflare.apis.fl_constant import AdminCommandNames, ReturnCode
from nvflare.apis.shareable import Shareable, make_reply
from nvflare.fuel.utils.pipe.file_pipe import FilePipe
from .client_status import ClientStatus, get_status_message
class ClientExecutor(object):
    """Base class for starting and controlling FL client training.

    Sets up the file pipe used to communicate with the training process;
    lifecycle commands are implemented by concrete subclasses such as
    ProcessExecutor.
    """

    def __init__(self, uid) -> None:
        """Create the executor and its communication pipe.

        Args:
            uid: unique id of the FL client, used to namespace the pipe path.
        """
        pipe_path = os.path.join("/tmp/fl", uid, "comm")
        # exist_ok avoids the check-then-create race of the previous
        # os.path.exists()/os.makedirs() pair when executors start concurrently.
        os.makedirs(pipe_path, exist_ok=True)
        self.pipe = FilePipe(root_path=pipe_path, name="training")
        self.logger = logging.getLogger(self.__class__.__name__)

    def start_train(self, client, args, app_root, app_custom_folder, listen_port):
        """Start the FL client training.

        Args:
            client: the FL client object.
            args: admin command arguments for starting the FL client training.
            app_root: the root folder of the running APP.
            app_custom_folder: the custom code folder of the APP.
            listen_port: port the child process listens on for commands.
        """
        pass

    def start_mgpu_train(self, client, args, app_root, gpu_number, app_custom_folder, listen_port):
        """Start the FL client training using multiple GPUs.

        Args:
            client: the FL client object.
            args: admin command arguments for starting the FL client training.
            app_root: the root folder of the running APP.
            gpu_number: number of GPUs to run FL training with.
            app_custom_folder: the custom code folder of the APP.
            listen_port: port the child process listens on for commands.
        """
        pass

    def check_status(self, client):
        """Check the status of the running client.

        Args:
            client: the FL client object.

        Returns:
            Running FL client status message.
        """
        pass

    def abort_train(self, client):
        """Abort the running client.

        Args:
            client: the FL client object.
        """
        pass

    def abort_task(self, client):
        """Abort the task the client is currently executing.

        Args:
            client: the FL client object.
        """
        pass

    def get_run_info(self):
        """Get the run_info from the InfoCollector."""
        pass

    def get_errors(self):
        """Get the error_info from the InfoCollector."""
        pass

    def reset_errors(self):
        """Reset the error_info for the InfoCollector."""
        pass

    def send_aux_command(self, shareable: Shareable):
        """Send the aux command to the child process.

        Args:
            shareable: the Shareable payload of the aux command.
        """
        pass

    def cleanup(self):
        """Clear the communication pipe's contents."""
        self.pipe.clear()
class ProcessExecutor(ClientExecutor):
    """ClientExecutor that runs the FL client training in a child process.

    Admin commands are relayed to the child over a multiprocessing connection
    listening on ``listen_port``.
    """

    def __init__(self, uid):
        """Init the ProcessExecutor.

        Args:
            uid: client uid.
        """
        ClientExecutor.__init__(self, uid)
        # Lazily-created connection to the child process command listener.
        self.conn_client = None
        self.listen_port = 6000
        # Guards client.process access across the admin and waiter threads.
        self.lock = threading.Lock()

    def get_conn_client(self):
        """Lazily connect to the child process command listener."""
        if not self.conn_client:
            try:
                address = ("localhost", self.listen_port)
                self.conn_client = Client(address, authkey="client process secret password".encode())
            except Exception:
                # The child listener may not be up yet; callers retry later.
                pass

    def create_pipe(self):
        """Create pipe to communicate between child (training) and main (logic) thread."""
        pipe = FilePipe(root_path="/fl/server", name="training")
        return pipe

    def start_train(self, client, args, app_root, app_custom_folder, listen_port):
        """Start the FL client training in a child process.

        Args:
            client: the FL client object.
            args: admin command arguments for starting the FL client training.
            app_root: the root folder of the running APP.
            app_custom_folder: custom folder appended to PYTHONPATH ("" for none).
            listen_port: port the child process command listener uses.
        """
        self.listen_port = listen_port
        new_env = os.environ.copy()
        if app_custom_folder != "":
            new_env["PYTHONPATH"] = new_env["PYTHONPATH"] + ":" + app_custom_folder
        command_options = ""
        for t in args.set:
            command_options += " " + t
        command = (
            f"{sys.executable} -m nvflare.private.fed.app.client.worker_process -m "
            + args.workspace
            + " -s fed_client.json "
            " --set" + command_options + " print_conf=True"
        )
        # NOTE: shlex.split's second positional parameter is `comments`; passing
        # a string there accidentally enables comment parsing, so call it with
        # the command only.
        # Use os.setsid to create a new process group ID so the whole group can
        # be killed on abort.
        process = subprocess.Popen(shlex.split(command), preexec_fn=os.setsid, env=new_env)
        print("training child process ID: {}".format(process.pid))
        client.process = process
        client.multi_gpu = False
        client.status = ClientStatus.STARTED
        thread = threading.Thread(
            target=self.wait_training_process_finish, args=(client, args, app_root, app_custom_folder)
        )
        thread.start()

    def check_status(self, client):
        """Check the status of the running client.

        Args:
            client: the FL client object.

        Returns:
            Running FL client status message.
        """
        try:
            self.get_conn_client()
            if self.conn_client:
                data = {"command": AdminCommandNames.CHECK_STATUS, "data": {}}
                self.conn_client.send(data)
                status_message = self.conn_client.recv()
                print("check status from process listener......")
                return status_message
            else:
                # No child listener available: report local status.
                return get_status_message(client.status)
        except Exception:
            self.logger.error("Check_status execution exception.")
            return "execution exception. Please try again."

    def get_run_info(self):
        """Get the run_info from the InfoCollector in the child process."""
        try:
            self.get_conn_client()
            if self.conn_client:
                data = {"command": AdminCommandNames.SHOW_STATS, "data": {}}
                self.conn_client.send(data)
                run_info = self.conn_client.recv()
                return run_info
            else:
                return {}
        except Exception:
            self.logger.error("get_run_info() execution exception.")
            return {"error": "no info collector. Please try again."}

    def get_errors(self):
        """Get the error_info from the InfoCollector in the child process."""
        try:
            self.get_conn_client()
            if self.conn_client:
                data = {"command": AdminCommandNames.SHOW_ERRORS, "data": {}}
                self.conn_client.send(data)
                errors_info = self.conn_client.recv()
                return errors_info
            else:
                return None
        except Exception:
            self.logger.error("get_errors() execution exception.")
            return None

    def reset_errors(self):
        """Reset the error_info for the InfoCollector in the child process."""
        try:
            self.get_conn_client()
            if self.conn_client:
                data = {"command": AdminCommandNames.RESET_ERRORS, "data": {}}
                self.conn_client.send(data)
        except Exception:
            self.logger.error("reset_errors() execution exception.")

    def send_aux_command(self, shareable: Shareable):
        """Send an aux command to the child process and return its reply.

        Args:
            shareable: the Shareable payload of the aux command.

        Returns:
            The child's reply Shareable, or an EXECUTION_EXCEPTION reply on failure.
        """
        try:
            self.get_conn_client()
            if self.conn_client:
                data = {"command": AdminCommandNames.AUX_COMMAND, "data": shareable}
                self.conn_client.send(data)
                reply = self.conn_client.recv()
                return reply
            else:
                return make_reply(ReturnCode.EXECUTION_EXCEPTION)
        except Exception:
            return make_reply(ReturnCode.EXECUTION_EXCEPTION)

    def abort_train(self, client):
        """Abort the running client training process.

        Args:
            client: the FL client object.
        """
        if client.status == ClientStatus.STARTED:
            with self.lock:
                if client.process:
                    if self.conn_client:
                        data = {"command": AdminCommandNames.ABORT, "data": {}}
                        self.conn_client.send(data)
                        self.logger.debug("abort sent")
                        # wait for client to handle abort
                        time.sleep(2.0)
                    # kill the sub-process group directly
                    try:
                        os.killpg(os.getpgid(client.process.pid), 9)
                        self.logger.debug("kill signal sent")
                    except Exception:
                        # Process group may already be gone.
                        pass
                    client.process.terminate()
                    self.logger.debug("terminated")
            if self.conn_client:
                self.conn_client.close()
                self.conn_client = None
            self.cleanup()
        self.logger.info("Client training was terminated.")

    def abort_task(self, client):
        """Ask the child process to abort the currently executing task.

        Args:
            client: the FL client object.
        """
        if client.status == ClientStatus.STARTED:
            if self.conn_client:
                data = {"command": AdminCommandNames.ABORT_TASK, "data": {}}
                self.conn_client.send(data)
                self.logger.debug("abort_task sent")

    def wait_training_process_finish(self, client, args, app_root, app_custom_folder):
        """Block until the training child process exits, then clean up state."""
        # Wait for the listen_command thread to start, and send "start" message
        # to wake up the connection; give up after 15 seconds.
        start = time.time()
        while True:
            self.get_conn_client()
            if self.conn_client:
                data = {"command": AdminCommandNames.START_APP, "data": {}}
                self.conn_client.send(data)
                break
            time.sleep(1.0)
            if time.time() - start > 15:
                break
        self.logger.info("waiting for process to finish")
        client.process.wait()
        returncode = client.process.returncode
        self.logger.info(f"process finished with execution code: {returncode}")
        with self.lock:
            client.process = None
            if self.conn_client:
                self.conn_client.close()
                self.conn_client = None
        # Not to run cross_validation in a new process any more.
        client.cross_site_validate = False
        client.status = ClientStatus.STOPPED

    def close(self):
        """Shut down the child process listener and clean up the pipe."""
        if self.conn_client:
            data = {"command": AdminCommandNames.SHUTDOWN, "data": {}}
            self.conn_client.send(data)
            self.conn_client = None
        self.cleanup()
# class ThreadExecutor(ClientExecutor):
# def __init__(self, client, executor):
# self.client = client
# self.executor = executor
# def start_train(self, client, args, app_root, app_custom_folder, listen_port):
# future = self.executor.submit(lambda p: _start_client(*p), [client, args, app_root])
# def start_mgpu_train(self, client, args, app_root, gpu_number, app_custom_folder, listen_port):
# self.start_train(client, args, app_root)
# def check_status(self, client):
# return get_status_message(self.client.status)
# def abort_train(self, client):
# self.client.train_end = True
# self.client.fitter.train_ctx.ask_to_stop_immediately()
# self.client.fitter.train_ctx.set_prop("early_end", True)
# # self.client.model_manager.close()
# # self.client.status = ClientStatus.TRAINING_STOPPED
# return "Aborting the client..."
def update_client_properties(client, trainer):
    """Copy trainer-derived runtime settings onto the FL client in place.

    Args:
        client: the FL client object to update; must expose a ``communicator``.
        trainer: configurator providing ``client_config``, ``handlers`` and
            ``executors``.
    """
    # Fixed retry window in seconds; the communicator retries every 5 seconds.
    retry_timeout = 30
    client.client_args = trainer.client_config
    client.communicator.should_stop = False
    client.communicator.retry = int(math.ceil(float(retry_timeout) / 5))
    client.handlers = trainer.handlers
    client.executors = trainer.executors
    client.heartbeat_done = False
| 36.970464 | 119 | 0.594214 |
11fd80f587d5b158869efd47a0af863271c09bff | 236 | py | Python | code/matplotlib/test.py | qiudebo/13learn | 32b6ab0c6f6abd5873e3445b31a86f602520d473 | [
"MIT"
] | 1 | 2019-03-07T09:02:27.000Z | 2019-03-07T09:02:27.000Z | code/matplotlib/test.py | qiudebo/13learn | 32b6ab0c6f6abd5873e3445b31a86f602520d473 | [
"MIT"
] | null | null | null | code/matplotlib/test.py | qiudebo/13learn | 32b6ab0c6f6abd5873e3445b31a86f602520d473 | [
"MIT"
] | null | null | null | # *-* coding:utf-8 *-*
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import style
import os
from os import path
from matplotlib.font_manager import fontManager
# Chart coordinate system
| 11.8 | 48 | 0.699153 |
3588bb38100b775456e66128b76bd04ad107b891 | 52,545 | py | Python | spytest/apis/system/pfc.py | emilmih/sonic-mgmt | e4e42ec8028bf51b39587e2b53e526d505fe7938 | [
"Apache-2.0"
] | 132 | 2016-10-19T12:34:44.000Z | 2022-03-16T09:00:39.000Z | spytest/apis/system/pfc.py | emilmih/sonic-mgmt | e4e42ec8028bf51b39587e2b53e526d505fe7938 | [
"Apache-2.0"
] | 3,152 | 2016-09-21T23:05:58.000Z | 2022-03-31T23:29:08.000Z | spytest/apis/system/pfc.py | emilmih/sonic-mgmt | e4e42ec8028bf51b39587e2b53e526d505fe7938 | [
"Apache-2.0"
] | 563 | 2016-09-20T01:00:15.000Z | 2022-03-31T22:43:54.000Z | import json
from spytest import st
from apis.system.interface import interface_status_show, clear_interface_counters
from spytest.utils import filter_and_select
from utilities.common import make_list
from utilities.utils import get_interface_number_from_name
from apis.system.rest import config_rest, delete_rest, get_rest
# Substrings searched (case-insensitively) in CLI responses to detect command failure.
errors_list = ['error', 'invalid', 'usage', 'illegal', 'unrecognized']
def config_pfc_asymmetric(dut, mode, interface = [], **kwargs):
    """Enable or disable PFC asymmetric mode on the given interfaces.

    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param dut:
    :param mode: 'on' or 'off'
    :param interface: list of interfaces to configure
    :return: True on success, False otherwise
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    skip_error = kwargs.get('skip_error', False)
    err_msg = kwargs.get('error_msg')
    errors = make_list(err_msg) if err_msg else errors_list
    if mode not in ['on', 'off'] or not interface:
        st.error("Mode can take on|off values only, interface cannot be empty")
        return False
    ports = make_list(interface)
    cmds = list()
    if cli_type == 'click':
        for port in ports:
            # Convert alias names (containing '/') to internal interface names.
            if '/' in port:
                port = st.get_other_names(dut, [port])[0]
            cmds.append("sudo pfc config asymmetric {} {}".format(mode, port))
    elif cli_type == 'klish':
        prefix = "" if mode == 'on' else "no"
        for port in ports:
            intf_info = get_interface_number_from_name(port)
            cmds.append("interface {} {}".format(intf_info['type'], intf_info['number']))
            cmds.append("{} priority-flow-control asymmetric".format(prefix))
            cmds.append("exit")
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        payload = {"openconfig-qos-ext:config": {"asymmetric": mode == 'on'}}
        for port in ports:
            url = rest_urls['pfc_asymmetric_config'].format(port)
            if not config_rest(dut, rest_url=url, http_method=cli_type, json_data=payload):
                st.error("Failed to configure asymmetric mode: {} on port: {}".format(mode, port))
                return False
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    if cmds:
        response = st.config(dut, cmds, type=cli_type, skip_error_check=skip_error)
        if any(err.lower() in response.lower() for err in errors):
            st.error("The response is: {}".format(response))
            return False
    return True
def config_pfc_lossless_queues(dut, queues_list, ports_list, **kwargs):
    """Mark the given priorities as lossless (config=True) or lossy on the given ports.

    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param dut:
    :param queues_list: priority/queue number(s)
    :param ports_list: interface(s) to configure
    :return: True on success, False otherwise
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    config = kwargs.get('config', True)
    skip_error = kwargs.get('skip_error', False)
    err_msg = kwargs.get('error_msg')
    errors = make_list(err_msg) if err_msg else errors_list
    # click cannot report command errors; fall back to klish when errors matter.
    if skip_error and cli_type == 'click':
        cli_type = 'klish'
    ports = make_list(ports_list)
    queues = make_list(queues_list)
    if cli_type == 'click':
        pfc_enable = ",".join(str(q) for q in queues) if config else ""
        port_map = dict()
        for port in ports:
            if '/' in port:
                port = st.get_other_names(dut, [port])[0]
            port_map[port] = {"pfc_enable": pfc_enable}
        st.apply_json2(dut, json.dumps({'PORT_QOS_MAP': port_map}))
    elif cli_type == 'klish':
        no_form = "" if config else "no"
        cmds = list()
        for port in ports:
            intf_info = get_interface_number_from_name(port)
            cmds.append('interface {} {}'.format(intf_info['type'], intf_info['number']))
            for q in queues:
                cmds.append('{} priority-flow-control priority {}'.format(no_form, q))
            cmds.append('exit')
        response = st.config(dut, cmds, type=cli_type, skip_error_check=skip_error)
        if any(err.lower() in response.lower() for err in errors):
            st.error("The response is: {}".format(response))
            return False
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        message = "loss-less" if config else "lossy"
        for port in ports:
            url = rest_urls['pfc_lossless_queue_config'].format(port)
            for q in queues:
                payload = {"openconfig-qos-ext:pfc-priorities": {"pfc-priority": [{"dot1p": int(q), "config": {"dot1p": int(q), "enable": config}}]}}
                if not config_rest(dut, rest_url=url, http_method=cli_type, json_data=payload):
                    st.error("Failed to configure the priority: {} as {} on port: {}".format(q, message, port))
                    return False
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    return True
def verify_pfc_asymmetric(dut, ports, mode, cli_type=''):
    """
    To verify the asymmetric PFC mode on ports
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param dut:
    :type dut:
    :param ports:
    :type list:
    :param mode:
    :type on/off:
    :param cli_type:
    :type cli_type:
    :return: True when every port reports the expected mode, else False
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    ports = make_list(ports)
    if mode not in ['on', 'off']:
        st.error("Mode can take on|off values only")
        return False
    if cli_type == 'click':
        command = "pfc show asymmetric"
        output = st.show(dut, command, type=cli_type)
        # NOTE: for 'off' the check below is list membership against
        # ['off', 'N/A']; for 'on' it is substring containment in 'on'.
        asym_mode = ['off', 'N/A'] if mode == 'off' else 'on'
        for port in ports:
            # Convert alias names (containing '/') to internal interface names.
            port = st.get_other_names(dut, [port])[0] if '/' in port else port
            entry = filter_and_select(output, ['pfc_asymmetric'], {'interface': port})
            if not (len(entry) and entry[0]['pfc_asymmetric'] in asym_mode):
                st.error('Provided asymmetric mode: {} not matching with the actual mode: {} on port: {}'.format(mode, asym_mode, port))
                return False
    elif cli_type == 'klish':
        for port in ports:
            intf_data = get_interface_number_from_name(port)
            command = "show qos interface {} {}".format(intf_data['type'], intf_data['number'])
            output = st.show(dut, command, type=cli_type)
            entry = filter_and_select(output, None, {'pfc_asymmetric': mode})
            if not entry:
                st.error('Provided asymmetric mode: {} not matching with the actual mode on port: {}'.format(mode, port))
                return False
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        asym_mode = True if mode=='on' else False
        verify_payload = {"openconfig-qos-ext:asymmetric": asym_mode}
        for port in ports:
            url = rest_urls['pfc_asymmetric_get'].format(port)
            out = get_rest(dut, rest_url = url)
            # The REST response must equal the expected payload exactly.
            if not out['output'] == verify_payload:
                st.error('Provided asymmetric mode: {} not matching with the actual mode on port: {}'.format(mode, port))
                return False
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    return True
def start_pfc_wd(dut, action, detection_time, restoration_time, interface=[], **kwargs):
    """Start PFC watchdog with the given parameters on the given interfaces.

    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param dut:
    :param action: watchdog action
    :param detection_time: storm detection time
    :param restoration_time: storm restoration time
    :param interface: list of interfaces
    :return: True on success, False otherwise
    """
    if not interface:
        st.error("Please provide atleast one interface")
        return False
    cli_type = st.get_ui_type(dut, **kwargs)
    skip_error = kwargs.get('skip_error', False)
    err_msg = kwargs.get('error_msg')
    errors = make_list(err_msg) if err_msg else errors_list
    ports = make_list(interface)
    cmds = list()
    if cli_type == 'click':
        for port in ports:
            # Convert alias names (containing '/') to internal interface names.
            if '/' in port:
                port = st.get_other_names(dut, [port])[0]
            cmds.append("pfcwd start --action {} ports {} detection-time {} --restoration-time {}".format(action, port, detection_time, restoration_time))
    elif cli_type == 'klish':
        for port in ports:
            intf_info = get_interface_number_from_name(port)
            cmds.extend(["interface {} {}".format(intf_info['type'], intf_info['number']),
                         "priority-flow-control watchdog action {}".format(action),
                         "priority-flow-control watchdog detect-time {}".format(detection_time),
                         "priority-flow-control watchdog restore-time {}".format(restoration_time),
                         "exit"])
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        payload = {"openconfig-qos-ext:config": {"action": action.upper(), "detection-time": int(detection_time), "restoration-time": int(restoration_time)}}
        for port in ports:
            url = rest_urls['pfc_wd_interface_config'].format(port)
            if not config_rest(dut, rest_url=url, http_method=cli_type, json_data=payload):
                st.error("Failed to configure PFC watch dog parameters on port: {}".format(port))
                return False
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    if cmds:
        response = st.config(dut, cmds, type=cli_type, skip_error_check=skip_error)
        if any(err.lower() in response.lower() for err in errors):
            st.error("The response is: {}".format(response))
            return False
    return True
def stop_pfc_wd(dut, interface=[], **kwargs):
    """Turn off PFC watchdog on the given interfaces.

    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param dut:
    :param interface: list of interfaces
    :return: True on success, False otherwise
    """
    if not interface:
        st.error("Please provide atleast one interface")
        return False
    cli_type = st.get_ui_type(dut, **kwargs)
    skip_error = kwargs.get('skip_error', False)
    err_msg = kwargs.get('error_msg')
    errors = make_list(err_msg) if err_msg else errors_list
    ports = make_list(interface)
    cmds = list()
    if cli_type == 'click':
        for port in ports:
            # Convert alias names (containing '/') to internal interface names.
            if '/' in port:
                port = st.get_other_names(dut, [port])[0]
            cmds.append("pfcwd stop {}".format(port))
    elif cli_type == 'klish':
        for port in ports:
            intf_info = get_interface_number_from_name(port)
            cmds.extend(["interface {} {}".format(intf_info['type'], intf_info['number']),
                         "priority-flow-control watchdog off",
                         "exit"])
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        for port in ports:
            url = rest_urls['pfc_wd_interface_config'].format(port)
            if not delete_rest(dut, rest_url=url):
                st.error("Failed to stop PFC watch dog on {}".format(port))
                return False
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    if cmds:
        response = st.config(dut, cmds, type=cli_type, skip_error_check=skip_error)
        if any(err.lower() in response.lower() for err in errors):
            st.error("The response is: {}".format(response))
            return False
    return True
def pfc_wd_counter_poll_interval(dut, interval, **kwargs):
    """Set the PFC watchdog counter polling interval.

    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param dut:
    :param interval: polling interval
    :return: True on success, False otherwise
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    skip_error = kwargs.get('skip_error', False)
    err_msg = kwargs.get('error_msg')
    errors = make_list(err_msg) if err_msg else errors_list
    command = ''
    if cli_type == 'click':
        command = "pfcwd interval {}".format(interval)
    elif cli_type == 'klish':
        command = "priority-flow-control watchdog polling-interval {}".format(interval)
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        url = rest_urls['pfc_wd_global_config']
        payload = {"openconfig-qos-ext:pfc-watchdog": {"poll": {"config": {"poll-interval": int(interval)}}}}
        if not config_rest(dut, rest_url=url, http_method=cli_type, json_data=payload):
            st.error('Failed to configure PFC Watch-Dog polling interval as: {}'.format(interval))
            return False
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    if command:
        response = st.config(dut, command, type=cli_type, skip_error_check=skip_error)
        if any(err.lower() in response.lower() for err in errors):
            st.error("The response is: {}".format(response))
            return False
    return True
def pfc_wd_counter_poll_config(dut, enable, **kwargs):
    """Enable or disable PFC watchdog counter polling.

    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param dut:
    :param enable: True to enable, False to disable
    :return: True on success, False otherwise
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    skip_error = kwargs.get('skip_error', False)
    err_msg = kwargs.get('error_msg')
    errors = make_list(err_msg) if err_msg else errors_list
    command = ''
    if cli_type == 'click':
        mode = 'enable' if enable else 'disable'
        command = "pfcwd counter_poll {}".format(mode)
    elif cli_type == 'klish':
        if enable:
            command = 'priority-flow-control watchdog counter-poll'
        else:
            command = 'no priority-flow-control watchdog counter-poll'
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        url = rest_urls['pfc_wd_global_config']
        mode = 'ENABLE' if enable else 'DISABLE'
        payload = {"openconfig-qos-ext:pfc-watchdog": {"flex": {"config": {"counter-poll": mode}}}}
        if not config_rest(dut, rest_url=url, http_method=cli_type, json_data=payload):
            st.error('Failed to {} PFC Watch-Dog counter poll'.format(mode))
            return False
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    if command:
        response = st.config(dut, command, type=cli_type, skip_error_check=skip_error)
        if any(err.lower() in response.lower() for err in errors):
            st.error("The response is: {}".format(response))
            return False
    return True
def show_pfc_wd_config(dut, ports=[], **kwargs):
    """
    To get PFC Watch-Dog configuration
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom)
    :param dut:
    :type dut:
    :param ports:
    :type list:
    :return: list of parsed entries (one dict per port), or False on failure
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    # Without an explicit port list, only the click command can show all ports.
    cli_type = cli_type if ports else 'click'
    ports = make_list(ports)
    if cli_type == 'click':
        command = "pfcwd show config"
        output = st.show(dut, command, type=cli_type)
    elif cli_type == 'klish':
        output = list()
        for port in ports:
            intf_data = get_interface_number_from_name(port)
            command = "show qos interface {} {}".format(intf_data['type'], intf_data['number'])
            out = st.show(dut, command, type=cli_type)
            # Tag the parsed entry with the port name (only when parsing
            # produced a non-empty list of dicts).
            _ = out[0].update(interface=port) if out and isinstance(out, list) and isinstance(out[0], dict) else out
            output.extend(out)
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        output = list()
        for port in ports:
            url = rest_urls['get_pfc_params'].format(port)
            out = get_rest(dut, rest_url = url)
            # Skip ports whose REST response carries no 'output' payload.
            if (out and ('output' in out) and out.get('output')):
                out = _get_rest_pfc_params_config(out['output'])
                _ = out[0].update(interface=port) if out and isinstance(out, list) and isinstance(out[0], dict) else out
                output.extend(out)
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    return output
def show_pfc_wd_stats(dut, **kwargs):
    """
    To get PFC Watch-Dog statistics
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom)
    :param dut:
    :type dut:
    :param ports:
    :type ports:
    :return: list of parsed watchdog statistics entries, or False on failure
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    ports = make_list(kwargs.get('ports', []))
    command = ''
    if cli_type == 'click':
        command = "pfcwd show stats"
        output = st.show(dut, command, type=cli_type)
    elif cli_type == 'klish':
        temp_vars = st.get_testbed_vars()
        if not ports:
            # The 'all' keyword form depends on whether the testbed uses alias names.
            port = 'Eth all' if temp_vars.config.ifname_type == 'alias' else 'Ethernet all'
            command = "show qos interface {} priority-flow-control statistics queue".format(port)
            output = st.show(dut, command, type=cli_type)
        else:
            output = list()
            for port in ports:
                intf_data = get_interface_number_from_name(port)
                command = "show qos interface {} {} priority-flow-control statistics queue".format(intf_data['type'], intf_data['number'])
                output.extend(st.show(dut, command, type=cli_type))
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        if not ports:
            url = rest_urls['get_pfc_all_counters']
            out = get_rest(dut, rest_url=url, timeout=120)
            # Fail fast when the REST response is missing or has no payload.
            if not (out and ('output' in out) and out.get('output')):
                st.error("No data found in output: {}".format(out))
                return False
            output = _get_rest_pfc_wd_stats_all(out['output'])
        else:
            output = list()
            for port in ports:
                url = rest_urls['get_pfcwd_counters'].format(port)
                out = get_rest(dut, rest_url=url, timeout=20)
                if not (out and ('output' in out) and out.get('output')):
                    st.error("No data found in output: {}".format(out))
                    return False
                output.extend(_get_rest_pfc_wd_stats(out['output'], port))
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    return output
def show_asymmetric_pfc(dut, ports=[], cli_type=''):
    """
    To show asymmetric PFC configuration on ports
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param dut:
    :type dut:
    :param ports:
    :type list:
    :param cli_type:
    :type cli_type:
    :return: list of parsed entries, or False on failure
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    # Without an explicit port list, only the click command can show all ports.
    cli_type = cli_type if ports else 'click'
    ports = make_list(ports)
    if cli_type == 'click':
        command = "pfc show asymmetric"
        output = st.show(dut, command, type=cli_type)
    elif cli_type == 'klish':
        output = list()
        for port in ports:
            intf_data = get_interface_number_from_name(port)
            command = "show qos interface {} {}".format(intf_data['type'], intf_data['number'])
            out = st.show(dut, command, type=cli_type)
            # Tag the parsed entry with the port name (only when parsing
            # produced a non-empty list of dicts).
            _ = out[0].update(interface=port) if out and isinstance(out, list) and isinstance(out[0], dict) else out
            output.extend(out)
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        output = list()
        for port in ports:
            url = rest_urls['get_pfc_params'].format(port)
            out = get_rest(dut, rest_url = url)
            # Skip ports whose REST response carries no 'output' payload.
            if (out and ('output' in out) and out.get('output')):
                out = _get_rest_pfc_params_config(out['output'])
                _ = out[0].update(interface=port) if out and isinstance(out, list) and isinstance(out[0], dict) else out
                output.extend(out)
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    return output
def clear_pfc_counters(dut, **kwargs):
    """Clear the PFC counters on the DUT.

    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :return: True on success, False otherwise
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    # Clear commands use RPC calls; OC-YANG URLs are unavailable, so REST
    # UI types fall back to klish.
    if cli_type in ['rest-patch', 'rest-put']:
        cli_type = 'klish'
    if cli_type == 'click':
        st.show(dut, "sonic-clear pfccounters", skip_tmpl=True)
    elif cli_type == 'klish':
        if not clear_interface_counters(dut, **kwargs):
            st.error("Failed to clear PFC counters")
            return False
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    return True
def show_pfc_counters(dut, **kwargs):
    """
    To get PFC pause-frame counters.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :param kwargs: ports (list, optional) - restrict the output to these ports
    :return: list of counter entries with port_mode normalized to
             'Port Rx'/'Port Tx' and thousands separators removed,
             or False on failure
    """
    cli_type = st.get_ui_type(dut, **kwargs)
    ports = make_list(kwargs.get('ports', []))
    if cli_type == 'click':
        command = "show pfc counters"
        rv = st.show(dut, command, type=cli_type)
    elif cli_type == 'klish':
        temp_vars = st.get_testbed_vars()
        if not ports:
            # The 'all' keyword form depends on whether the testbed uses alias names.
            port = 'Eth all' if temp_vars.config.ifname_type == 'alias' else 'Ethernet all'
            command = "show qos interface {} priority-flow-control statistics".format(port)
            rv = st.show(dut, command, type=cli_type)
        else:
            rv = list()
            for port in ports:
                intf_data = get_interface_number_from_name(port)
                command = "show qos interface {} {} priority-flow-control statistics".format(intf_data['type'], intf_data['number'])
                rv.extend(st.show(dut, command, type=cli_type))
    elif cli_type in ['rest-patch', 'rest-put']:
        rest_urls = st.get_datastore(dut, 'rest_urls')
        if not ports:
            url = rest_urls['get_pfc_all_counters']
            out = get_rest(dut, rest_url=url, timeout=120)
            # Guard 'out' itself before the membership test: get_rest can
            # return a falsy value on failure (consistent with show_pfc_wd_stats).
            if not (out and ('output' in out) and out.get('output')):
                st.error("No data found in output: {}".format(out))
                return False
            rv = _get_rest_pfc_counters_all(out['output'])
        else:
            rv = list()
            for port in ports:
                url = rest_urls['get_pfc_pause_counters'].format(port)
                out = get_rest(dut, rest_url=url, timeout=120)
                if not (out and ('output' in out) and out.get('output')):
                    st.error("No data found in output: {}".format(out))
                    return False
                rv.extend(_get_rest_pfc_counters(out['output'], port))
    else:
        st.error("Unsupported CLI TYPE {}".format(cli_type))
        return False
    # Normalize direction labels and strip thousands separators.
    output = [{k: v.replace('received', 'Port Rx').replace('transmitted', 'Port Tx').replace(',', '') for k, v in each.items()} for each in rv]
    return output
def get_pfc_counters(dut, interface, mode, *argv):
    """
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    Select specific PFC counter fields for one interface and direction.
    :param dut: device handle
    :param interface: interface name to match
    :param mode: 'rx' selects the received row; anything else selects transmitted
    :param argv: counter field names to return, e.g. 'pfc0','pfc1',...,'pfc7'
    :return: matching rows restricted to the requested fields
    """
    counters = show_pfc_counters(dut)
    direction = 'Port Rx' if mode.lower() == 'rx' else 'Port Tx'
    selection = {'port': interface, 'port_mode': direction}
    return filter_and_select(counters, argv, selection)
def get_pfc_counters_all(dut, interface, mode='tx'):
    """
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    Fetch the full PFC counter row for one interface and direction.
    :param dut: device handle
    :param interface: interface name to match
    :param mode: 'rx' selects the received row; anything else (default 'tx')
        selects the transmitted row
    :return: (True, entry_dict) on success; (False, 0) when no row matches
    """
    output = show_pfc_counters(dut)
    port_mode = 'Port Rx' if mode.lower() == 'rx' else 'Port Tx'
    entries = filter_and_select(output, None, {'port': interface, 'port_mode': port_mode})
    if not entries:
        # Log-message typo fixed ("couters" -> "counters")
        st.log("No queue counters found on {} for {} {}".format(dut, interface, mode))
        return (False, 0)
    # Strip thousands separators so callers can convert the values to int directly
    new_entry = {key: value.replace(",", "") for key, value in entries[0].items()}
    return (True, new_entry)
def verify_pfc_counters(dut, interface, mode='tx', **kwargs):
    """
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    Verify that each given PFC counter field holds the expected value.
    :param dut: device handle
    :param interface: interface name to match
    :param mode: 'rx' checks the received row; anything else the transmitted row
    :param kwargs: field -> expected-value pairs to verify
    :return: True when every field matches; False on the first mismatch
    """
    counters = show_pfc_counters(dut)
    direction = 'Port Rx' if mode.lower() == 'rx' else 'Port Tx'
    for field, expected in kwargs.items():
        criteria = {'port': interface, 'port_mode': direction, field: expected}
        if not filter_and_select(counters, None, criteria):
            st.log("{} and {} is not match ".format(field, expected))
            return False
    return True
def config_pfc_buffer_prameters(dut, hwsku, ports_dict, **kwargs):
    """
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    To configure the platform specific buffer constants
    :param dut: device handle
    :param hwsku: platform HWSKU string; matched (lower-cased) against the
        platform-family lists in the 'constants' datastore (TH/TH2/TH3/TD2/TD3/MV2)
    :param ports_dict: mapping of port name -> queue-type dict; the key
        'lossless_queues' selects lossless profiles, any other key is treated as lossy
    :param kwargs: core_buffer_config (default False) - also emit the global
        BUFFER_POOL / BUFFER_PROFILE / CABLE_LENGTH tables;
        apply_buffer_config (default True) - emit per-port BUFFER_PG / BUFFER_QUEUE tables
    :return: True when the generated config JSON was applied (or there was nothing
        to apply); False for an unknown platform family
    """
    constants = st.get_datastore(dut, "constants")
    ports_show = interface_status_show(dut, list(ports_dict.keys()))
    port_speed = dict()
    core_buffer_config = kwargs.get('core_buffer_config', False)
    apply_buffer_config = kwargs.get('apply_buffer_config', True)
    # Speed string like '100G' is rewritten to '100000' so it can index the
    # per-speed lossless profile maps below.
    for port in ports_dict.keys():
        port_speed[port] = filter_and_select(ports_show, ['speed'], {'interface': port})[0]['speed'].replace('G', '000')
    # Alias names (containing '/') are mapped back to native interface names,
    # since the buffer tables are keyed on native names.
    native_ports_map_dict = {port:st.get_other_names(dut, [port])[0] if '/' in port else port for port in ports_dict.keys()}
    retval = dict()
    # Side-effect comprehension: merge each table dict into the accumulated config.
    update_retval = lambda entries: {retval.update(entry) for entry in entries}
    # Each platform family below carries its own hard-coded pool/profile sizes and
    # cable length; the per-port BUFFER_PG/BUFFER_QUEUE generation is identical in
    # every branch apart from the speed->profile mapping.
    if hwsku.lower() in constants['TH_PLATFORMS']:
        if core_buffer_config:
            buffer_pool = {"BUFFER_POOL": {"egress_lossless_pool": {"mode": "static", "size": "12766208", "type": "egress"},
                                           "egress_lossy_pool": {"mode": "dynamic", "size": "7326924", "type": "egress"},
                                           "ingress_lossless_pool": {"mode": "dynamic", "size": "12766208", "type": "ingress", "xoff": "4625920"}}}
            buffer_profile = {"BUFFER_PROFILE": {"egress_lossless_profile": {"pool": "egress_lossless_pool", "size": "0", "static_th": "12766208"}, "egress_lossy_profile": {"dynamic_th": "3", "pool": "egress_lossless_pool", "size": "1518"},
                                                 "ingress_lossy_profile": {"dynamic_th": "3", "pool": "ingress_lossless_pool", "size": "0"},
                                                 "pg_lossless_10000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"},
                                                 "pg_lossless_25000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"},
                                                 "pg_lossless_40000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"},
                                                 "pg_lossless_100000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"}}}
            cable_length_config = {"CABLE_LENGTH": {"AZURE": {native_ports_map_dict[port]: "300m" for port in ports_dict.keys()}}}
            update_retval([buffer_pool, buffer_profile, cable_length_config])
        if apply_buffer_config:
            ingress_profile_mapping = {'100000' : 'pg_lossless_100000_300m_profile', '40000' : 'pg_lossless_40000_300m_profile', '25000' : 'pg_lossless_25000_300m_profile', '10000' : 'pg_lossless_10000_300m_profile', 'lossy_profile': 'ingress_lossy_profile'}
            egress_profile_mapping = {'lossy_profile' : 'egress_lossy_profile', 'lossless_profile' : 'egress_lossless_profile'}
            buffer_pg = dict()
            buffer_queue = dict()
            get_profile = lambda profile: {"profile": "{}".format(profile)}
            for port, queue_info in ports_dict.items():
                native_port = native_ports_map_dict[port]
                for queue_type, queues in queue_info.items():
                    buffer_pg.update({"{}|{}".format(native_port, queue):get_profile(ingress_profile_mapping[port_speed[port]] if queue_type == 'lossless_queues' else ingress_profile_mapping['lossy_profile']) for queue in queues})
                    buffer_queue.update({"{}|{}".format(native_port, queue):get_profile(egress_profile_mapping['lossless_profile'] if queue_type == 'lossless_queues' else egress_profile_mapping['lossy_profile']) for queue in queues})
            buffer_pg = {"BUFFER_PG":buffer_pg}
            buffer_queue = {"BUFFER_QUEUE":buffer_queue}
            update_retval([buffer_pg, buffer_queue])
        st.debug(retval)
    elif hwsku.lower() in constants['TH2_PLATFORMS']:
        if core_buffer_config:
            buffer_pool = {"BUFFER_POOL": {"egress_lossless_pool": {"mode": "static", "size": "12766208", "type": "egress"},
                                           "egress_lossy_pool": {"mode": "dynamic", "size": "7326924", "type": "egress"},
                                           "ingress_lossless_pool": {"mode": "dynamic", "size": "12766208", "type": "ingress", "xoff": "4625920"}}}
            buffer_profile = {"BUFFER_PROFILE": {"egress_lossless_profile": {"pool": "egress_lossless_pool", "size": "0", "static_th": "12766208"}, "egress_lossy_profile": {"dynamic_th": "3", "pool": "egress_lossless_pool", "size": "1518"},
                                                 "ingress_lossy_profile": {"dynamic_th": "3","pool": "ingress_lossless_pool","size": "0"},
                                                 "pg_lossless_10000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"},
                                                 "pg_lossless_25000_300m_profile": {"dynamic_th": "-3","pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"},
                                                 "pg_lossless_40000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"},
                                                 "pg_lossless_100000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"}}}
            cable_length_config = {"CABLE_LENGTH": {"AZURE": {native_ports_map_dict[port]: "300m" for port in ports_dict.keys()}}}
            update_retval([buffer_pool, buffer_profile, cable_length_config])
        if apply_buffer_config:
            ingress_profile_mapping = {'10000' : 'pg_lossless_10000_300m_profile', '25000' : 'pg_lossless_25000_300m_profile', '40000' : 'pg_lossless_40000_300m_profile', '100000' : 'pg_lossless_100000_300m_profile', 'lossy_profile': 'ingress_lossy_profile'}
            egress_profile_mapping = {'lossy_profile' : 'egress_lossy_profile', 'lossless_profile' : 'egress_lossless_profile'}
            buffer_pg = dict()
            buffer_queue = dict()
            get_profile = lambda profile: {"profile": "{}".format(profile)}
            for port, queue_info in ports_dict.items():
                native_port = native_ports_map_dict[port]
                for queue_type, queues in queue_info.items():
                    buffer_pg.update({"{}|{}".format(native_port, queue):get_profile(ingress_profile_mapping[port_speed[port]] if queue_type == 'lossless_queues' else ingress_profile_mapping['lossy_profile']) for queue in queues})
                    buffer_queue.update({"{}|{}".format(native_port, queue):get_profile(egress_profile_mapping['lossless_profile'] if queue_type == 'lossless_queues' else egress_profile_mapping['lossy_profile']) for queue in queues})
            buffer_pg = {"BUFFER_PG":buffer_pg}
            buffer_queue = {"BUFFER_QUEUE":buffer_queue}
            update_retval([buffer_pg, buffer_queue])
        st.debug(retval)
    elif hwsku.lower() in constants['TH3_PLATFORMS']:
        if core_buffer_config:
            buffer_pool = {"BUFFER_POOL": {"egress_lossy_pool": {"mode": "dynamic", "size": "67108864", "type": "egress"},
                                           "ingress_lossless_pool": {"mode": "dynamic", "size": "59001152", "type": "ingress", "xoff": "7428992"}}}
            buffer_profile = {"BUFFER_PROFILE": {"egress_lossless_profile": {"dynamic_th": "3", "pool": "egress_lossy_pool", "size": "0"},
                                                 "egress_lossy_profile": {"dynamic_th": "3", "pool": "egress_lossy_pool", "size": "0"},
                                                 "ingress_lossy_profile": {"pool": "ingress_lossless_pool", "size": "0", "static_th": "67108864"},
                                                 "pg_lossless_10000_40m_profile": {"dynamic_th": "-2", "pool": "ingress_lossless_pool", "size": "1270", "xoff": "190500", "xon": "0", "xon_offset": "2540"},
                                                 "pg_lossless_50000_40m_profile": {"dynamic_th": "-2", "pool": "ingress_lossless_pool", "size": "1270", "xoff": "190500", "xon": "0", "xon_offset": "2540"},
                                                 "pg_lossless_100000_40m_profile": {"dynamic_th": "-2", "pool": "ingress_lossless_pool", "size": "1270", "xoff": "190500", "xon": "0", "xon_offset": "2540"},
                                                 "pg_lossless_200000_40m_profile": {"dynamic_th": "-2", "pool": "ingress_lossless_pool", "size": "1270", "xoff": "190500", "xon": "0", "xon_offset": "2540"},
                                                 "pg_lossless_400000_40m_profile": {"dynamic_th": "-2", "pool": "ingress_lossless_pool", "size": "1270","xoff": "190500", "xon": "0", "xon_offset": "2540"}}}
            cable_length_config = {"CABLE_LENGTH": {"AZURE": {native_ports_map_dict[port]: "40m" for port in ports_dict.keys()}}}
            update_retval([buffer_pool, buffer_profile, cable_length_config])
        if apply_buffer_config:
            ingress_profile_mapping = {'400000' : 'pg_lossless_400000_40m_profile', '200000' : 'pg_lossless_200000_40m_profile', '100000' : 'pg_lossless_100000_40m_profile', '50000': 'pg_lossless_50000_40m_profile', '10000' : 'pg_lossless_10000_40m_profile', 'lossy_profile': 'ingress_lossy_profile'}
            egress_profile_mapping = {'lossy_profile' : 'egress_lossy_profile', 'lossless_profile' : 'egress_lossless_profile'}
            buffer_pg = dict()
            buffer_queue = dict()
            get_profile = lambda profile: {"profile": "{}".format(profile)}
            for port, queue_info in ports_dict.items():
                native_port = native_ports_map_dict[port]
                for queue_type, queues in queue_info.items():
                    buffer_pg.update({"{}|{}".format(native_port, queue):get_profile(ingress_profile_mapping[port_speed[port]] if queue_type == 'lossless_queues' else ingress_profile_mapping['lossy_profile']) for queue in queues})
                    buffer_queue.update({"{}|{}".format(native_port, queue):get_profile(egress_profile_mapping['lossless_profile'] if queue_type == 'lossless_queues' else egress_profile_mapping['lossy_profile']) for queue in queues})
            buffer_pg = {"BUFFER_PG":buffer_pg}
            buffer_queue = {"BUFFER_QUEUE":buffer_queue}
            update_retval([buffer_pg, buffer_queue])
        st.debug(retval)
    elif hwsku.lower() in constants['TD2_PLATFORMS']:
        if core_buffer_config:
            buffer_pool = {"BUFFER_POOL": {"egress_lossless_pool": {"mode": "static", "size": "12766208", "type": "egress"},
                                           "egress_lossy_pool": {"mode": "dynamic", "size": "7326924", "type": "egress"},
                                           "ingress_lossless_pool": {"mode": "dynamic", "size": "12766208", "type": "ingress"}}}
            buffer_profile = {"BUFFER_PROFILE": {"egress_lossless_profile": {"pool": "egress_lossless_pool", "size": "0", "static_th": "12766208"}, "egress_lossy_profile": {"dynamic_th": "3", "pool": "egress_lossless_pool", "size": "1518"},
                                                 "ingress_lossy_profile": {"dynamic_th": "3", "pool": "ingress_lossless_pool", "size": "0"},
                                                 "pg_lossless_1000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"},
                                                 "pg_lossless_10000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"},
                                                 "pg_lossless_40000_300m_profile": {"dynamic_th": "-3", "pool": "ingress_lossless_pool", "size": "56368", "xoff": "55120", "xon": "18432", "xon_offset": "2496"}}}
            cable_length_config = {"CABLE_LENGTH": {"AZURE": {native_ports_map_dict[port]: "300m" for port in ports_dict.keys()}}}
            update_retval([buffer_pool, buffer_profile, cable_length_config])
        if apply_buffer_config:
            ingress_profile_mapping = {'10000' : 'pg_lossless_10000_300m_profile', '40000' : 'pg_lossless_40000_300m_profile', '1000' : 'pg_lossless_1000_300m_profile', 'lossy_profile': 'ingress_lossy_profile'}
            egress_profile_mapping = {'lossy_profile' : 'egress_lossy_profile', 'lossless_profile' : 'egress_lossless_profile'}
            buffer_pg = dict()
            buffer_queue = dict()
            get_profile = lambda profile: {"profile": "{}".format(profile)}
            for port, queue_info in ports_dict.items():
                native_port = native_ports_map_dict[port]
                for queue_type, queues in queue_info.items():
                    buffer_pg.update({"{}|{}".format(native_port, queue):get_profile(ingress_profile_mapping[port_speed[port]] if queue_type == 'lossless_queues' else ingress_profile_mapping['lossy_profile']) for queue in queues})
                    buffer_queue.update({"{}|{}".format(native_port, queue):get_profile(egress_profile_mapping['lossless_profile'] if queue_type == 'lossless_queues' else egress_profile_mapping['lossy_profile']) for queue in queues})
            buffer_pg = {"BUFFER_PG":buffer_pg}
            buffer_queue = {"BUFFER_QUEUE":buffer_queue}
            update_retval([buffer_pg, buffer_queue])
        st.debug(retval)
    elif hwsku.lower() in constants['TD3_PLATFORMS']+constants['MV2_PLATFORMS']:
        if core_buffer_config:
            buffer_pool = {"BUFFER_POOL": {"egress_lossless_pool": {"mode": "static", "size": "33004032", "type": "egress"},
                                           "egress_lossy_pool": {"mode": "dynamic", "size": "12766208", "type": "egress"},
                                           "ingress_lossless_pool": {"mode": "dynamic", "size": "12766208", "type": "ingress", "xoff": "196608"}}}
            buffer_profile = {"BUFFER_PROFILE": {"egress_lossless_profile": {"pool": "egress_lossless_pool", "size": "0", "static_th": "33004032"}, "egress_lossy_profile": {"dynamic_th": "3", "pool": "egress_lossless_pool", "size": "1518"},
                                                 "ingress_lossy_profile": {"dynamic_th": "3", "pool": "ingress_lossless_pool", "size": "0"},
                                                 "pg_lossless_10000_300m_profile": {"dynamic_th": "1", "pool": "ingress_lossless_pool", "size": "9427", "xoff": "50176", "xon": "0", "xon_offset": "3584"},
                                                 "pg_lossless_25000_300m_profile": {"dynamic_th": "1", "pool": "ingress_lossless_pool", "size": "9427", "xoff": "50176", "xon": "0", "xon_offset": "3584"},
                                                 "pg_lossless_40000_300m_profile": {"dynamic_th": "1", "pool": "ingress_lossless_pool", "size": "9427", "xoff": "50176", "xon": "0", "xon_offset": "3584"},
                                                 "pg_lossless_100000_300m_profile": {"dynamic_th": "1", "pool": "ingress_lossless_pool", "size": "9427", "xoff": "50176", "xon": "0", "xon_offset": "3584"}}}
            # These two SKUs additionally support a 1G lossless profile
            if hwsku.lower() in ['quanta-ix8a-bwde-56x', 'accton-as4630-54pe']:
                buffer_profile['BUFFER_PROFILE'].update(pg_lossless_1000_300m_profile={"dynamic_th": "1", "pool": "ingress_lossless_pool", "size": "9427", "xoff": "50176", "xon": "0", "xon_offset": "3584"})
            cable_length_config = {"CABLE_LENGTH": {"AZURE": {native_ports_map_dict[port]: "300m" for port in ports_dict.keys()}}}
            update_retval([buffer_pool, buffer_profile, cable_length_config])
        if apply_buffer_config:
            ingress_profile_mapping = {'100000' : 'pg_lossless_100000_300m_profile', '40000' : 'pg_lossless_40000_300m_profile', '25000' : 'pg_lossless_25000_300m_profile', '10000' : 'pg_lossless_10000_300m_profile', 'lossy_profile': 'ingress_lossy_profile'}
            if hwsku.lower() in ['quanta-ix8a-bwde-56x', 'accton-as4630-54pe']:
                ingress_profile_mapping.update({'1000': 'pg_lossless_1000_300m_profile'})
            egress_profile_mapping = {'lossy_profile' : 'egress_lossy_profile', 'lossless_profile' : 'egress_lossless_profile'}
            buffer_pg = dict()
            buffer_queue = dict()
            get_profile = lambda profile: {"profile": "{}".format(profile)}
            for port, queue_info in ports_dict.items():
                native_port = native_ports_map_dict[port]
                for queue_type, queues in queue_info.items():
                    buffer_pg.update({"{}|{}".format(native_port, queue):get_profile(ingress_profile_mapping[port_speed[port]] if queue_type == 'lossless_queues' else ingress_profile_mapping['lossy_profile']) for queue in queues})
                    buffer_queue.update({"{}|{}".format(native_port, queue):get_profile(egress_profile_mapping['lossless_profile'] if queue_type == 'lossless_queues' else egress_profile_mapping['lossy_profile']) for queue in queues})
            buffer_pg = {"BUFFER_PG":buffer_pg}
            buffer_queue = {"BUFFER_QUEUE":buffer_queue}
            update_retval([buffer_pg, buffer_queue])
        st.debug(retval)
    else:
        st.error("Invalid platform")
        return False
    # Apply the accumulated tables as one JSON config push
    if retval:
        final_data = json.dumps(retval)
        st.apply_json2(dut, final_data)
    return True
def _get_rest_pfc_wd_stats(data, port):
    """
    To get processed output from REST PFC watchdog statistics per port
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param data: decoded REST payload containing "openconfig-qos-ext:pfc-queue"
    :param port: interface name stamped into every returned row
    :return: list of per-queue statistic dicts with template-compatible keys
    """
    # REST statistic leaf -> template key, in output order. Replaces ten
    # copy-pasted "if <leaf> in stats" branches with one data-driven loop.
    leaf_to_key = (
        ('rx-drop', 'rx_drop'),
        ('rx-drop-last', 'rx_last_drop'),
        ('rx-ok', 'rx_ok'),
        ('rx-ok-last', 'rx_last_ok'),
        ('storm-detected', 'storm_detect'),
        ('storm-restored', 'storm_restore'),
        ('tx-drop', 'tx_drop'),
        ('tx-drop-last', 'tx_last_drop'),
        ('tx-ok', 'tx_ok'),
        ('tx-ok-last', 'tx_last_ok'),
    )
    retval = list()
    if data.get("openconfig-qos-ext:pfc-queue") and data["openconfig-qos-ext:pfc-queue"].get("pfc-queue") and isinstance(data["openconfig-qos-ext:pfc-queue"]["pfc-queue"], list):
        for entry in data["openconfig-qos-ext:pfc-queue"]["pfc-queue"]:
            if 'queue' in entry and entry.get('statistics'):
                stats = entry['statistics']
                # 'status' is always reported as N/A by this REST path
                temp = {'port': port, 'status': 'N/A', 'queue': str(entry['queue'])}
                for leaf, key in leaf_to_key:
                    if leaf in stats:
                        temp[key] = str(stats[leaf])
                retval.append(temp)
    st.debug(retval)
    return retval
def _get_rest_pfc_wd_stats_all(data):
    """
    To get processed output from REST PFC watchdog statistics for all ports
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param data: decoded REST payload containing "openconfig-qos:interface"
    :return: list of per-port, per-queue statistic dicts with template-compatible keys
    """
    # REST statistic leaf -> template key, in output order. Replaces ten
    # copy-pasted "if <leaf> in stats" branches with one data-driven loop.
    leaf_to_key = (
        ('rx-drop', 'rx_drop'),
        ('rx-drop-last', 'rx_last_drop'),
        ('rx-ok', 'rx_ok'),
        ('rx-ok-last', 'rx_last_ok'),
        ('storm-detected', 'storm_detect'),
        ('storm-restored', 'storm_restore'),
        ('tx-drop', 'tx_drop'),
        ('tx-drop-last', 'tx_last_drop'),
        ('tx-ok', 'tx_ok'),
        ('tx-ok-last', 'tx_last_ok'),
    )
    retval = list()
    if "openconfig-qos:interface" in data and data.get("openconfig-qos:interface") and isinstance(data["openconfig-qos:interface"], list):
        for entry in data["openconfig-qos:interface"]:
            if "interface-id" in entry and entry.get("openconfig-qos-ext:pfc") and entry["openconfig-qos-ext:pfc"].get("pfc-queue") and entry["openconfig-qos-ext:pfc"]["pfc-queue"].get("pfc-queue") and isinstance(entry["openconfig-qos-ext:pfc"]["pfc-queue"]["pfc-queue"], list):
                for pfcwd_stat in entry["openconfig-qos-ext:pfc"]["pfc-queue"]["pfc-queue"]:
                    if 'queue' in pfcwd_stat and pfcwd_stat.get('statistics'):
                        stats = pfcwd_stat['statistics']
                        # 'status' is always reported as N/A by this REST path
                        temp = {'port': entry['interface-id'], 'status': 'N/A', 'queue': str(pfcwd_stat['queue'])}
                        for leaf, key in leaf_to_key:
                            if leaf in stats:
                                temp[key] = str(stats[leaf])
                        retval.append(temp)
    st.debug(retval)
    return retval
def _get_rest_pfc_counters(data, port):
    """
    To get processed output from REST PFC statistics per port
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param data: decoded REST payload containing "openconfig-qos-ext:pfc-priority"
    :param port: interface name stamped into the two returned rows
    :return: two-element list - the 'received' row then the 'transmitted' row
    """
    received = {'port': port, 'port_mode': 'received'}
    transmitted = {'port': port, 'port_mode': 'transmitted'}
    priority_rows = data.get("openconfig-qos-ext:pfc-priority")
    if isinstance(priority_rows, list) and priority_rows:
        for row in priority_rows:
            state = row.get('state') or {}
            stats = state.get('statistics')
            if stats and 'dot1p' in state:
                # One pfcN column per dot1p priority
                column = 'pfc{}'.format(state['dot1p'])
                if 'pause-frames-rx' in stats:
                    received[column] = str(stats['pause-frames-rx'])
                if 'pause-frames-tx' in stats:
                    transmitted[column] = str(stats['pause-frames-tx'])
    st.debug([received, transmitted])
    return [received, transmitted]
def _get_rest_pfc_counters_all(data):
    """
    To get processed output from REST PFC statistics for all ports
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom.com)
    :param data: decoded REST payload containing "openconfig-qos:interface"
    :return: list with one 'received' and one 'transmitted' row per interface
    """
    result_rows = list()
    interface_entries = data.get("openconfig-qos:interface")
    if isinstance(interface_entries, list) and interface_entries:
        for iface in interface_entries:
            if "interface-id" not in iface:
                continue
            pfc_container = iface.get("openconfig-qos-ext:pfc")
            if not pfc_container:
                continue
            priorities_container = pfc_container.get("pfc-priorities")
            if not priorities_container:
                continue
            priority_rows = priorities_container.get("pfc-priority")
            if not (priority_rows and isinstance(priority_rows, list)):
                continue
            received = {'port': iface["interface-id"], 'port_mode': 'received'}
            transmitted = {'port': iface["interface-id"], 'port_mode': 'transmitted'}
            for row in priority_rows:
                state = row.get('state') or {}
                stats = state.get('statistics')
                if stats and 'dot1p' in state:
                    # One pfcN column per dot1p priority
                    column = 'pfc{}'.format(state['dot1p'])
                    if 'pause-frames-rx' in stats:
                        received[column] = str(stats['pause-frames-rx'])
                    if 'pause-frames-tx' in stats:
                        transmitted[column] = str(stats['pause-frames-tx'])
            result_rows.extend([received, transmitted])
    st.debug(result_rows)
    return result_rows
def _get_rest_pfc_params_config(data):
    """
    To get PFC parameters configured on a port from a REST response.
    Author: Jagadish Chatrasi (jagadish.chatrasi@broadcom)
    :param data: decoded REST payload holding the "openconfig-qos-ext:pfc" container
    :return: single-element list with the summary dict (asymmetric flag, lossless
        priorities, watchdog action/detection/restoration times)
    """
    PFC_KEY = "openconfig-qos-ext:pfc"
    summary = {}
    # Asymmetric mode is "on" only when the leaf is present and truthy.
    asymmetric_enabled = (PFC_KEY in data and "state" in data[PFC_KEY]
                          and "asymmetric" in data[PFC_KEY]["state"]
                          and data[PFC_KEY]["state"]["asymmetric"])
    summary['pfc_asymmetric'] = "on" if asymmetric_enabled else "off"
    if PFC_KEY in data and "pfc-priorities" in data[PFC_KEY] and "pfc-priority" in data[PFC_KEY]["pfc-priorities"]:
        priority_rows = data[PFC_KEY]["pfc-priorities"]["pfc-priority"]
        if isinstance(priority_rows, list):
            enabled_priorities = []
            for row in priority_rows:
                if 'state' in row and 'dot1p' in row['state'] and 'enable' in row['state'] and row['state']['enable']:
                    enabled_priorities.append(str(row['state']['dot1p']))
            summary['pfc_priority'] = ','.join(enabled_priorities)
        # NOTE(review): when 'pfc-priority' exists but is not a list, no
        # 'pfc_priority' key is emitted at all - preserved from the original.
    else:
        summary['pfc_priority'] = ''
    if PFC_KEY in data and "watchdog" in data[PFC_KEY] and "state" in data[PFC_KEY]["watchdog"]:
        watchdog_state = data[PFC_KEY]["watchdog"]["state"]
        summary['action'] = watchdog_state["action"].lower() if "action" in watchdog_state else "N/A"
        summary['detectiontime'] = str(watchdog_state["detection-time"]) if "detection-time" in watchdog_state else "0"
        summary['restorationtime'] = str(watchdog_state["restoration-time"]) if "restoration-time" in watchdog_state else "0"
    else:
        summary['action'] = "N/A"
        summary['detectiontime'] = "0"
        summary['restorationtime'] = "0"
    st.debug([summary])
    return [summary]
| 55.020942 | 300 | 0.602931 |
21819fb02e3d976ea32d00521909365d93e5b2b0 | 2,622 | py | Python | Problem4.py | taking1fortheteam/pands-problem-set | 576c1049a19bb642e56e08733200cb57d3274a50 | [
"Apache-2.0"
] | null | null | null | Problem4.py | taking1fortheteam/pands-problem-set | 576c1049a19bb642e56e08733200cb57d3274a50 | [
"Apache-2.0"
] | null | null | null | Problem4.py | taking1fortheteam/pands-problem-set | 576c1049a19bb642e56e08733200cb57d3274a50 | [
"Apache-2.0"
] | null | null | null | # Aidan Conlon - 22 March 2019
# This is the solution to Problem 4
# Write a program that asks the user to input any positive integer and outputs the
# successive values of the following calculation. At each step calculate the next value
# by taking the current value and, if it is even, divide it by two, but if it is odd, multiply
# it by three and add one. Have the program end if the current value is one.
UserInput = input("Please enter any positive integer:") # Print to screen the comment and take a value entered by the user.
# calling it UserInput
try: # Use Try to see if value entered is an Integer or not.
y = int(UserInput) # Let y equal to the value entered by the user as an integer - if possible
if y <= 0: # If the value entered is less than 0 then it is not a positive integer and
print("That is not a positive integer") # print to screen letting user know it is anot a positive integer and then
quit() # quit.
while y > 0: # As long as the user entered a positive integer then using a while loop , so long as y is greater than 0 check for the following conditions
if y == 1: # If y is 1
quit() # then quit
if y %2 != 0: # If y is divisible by 2 with a denominator
y = ((y * 3) + 1) # multiply y by 3 and add 1
print(int(y) , end=" ") # print new value of y (with no decimal point (int)) to screen with no carriage return, rather a space for next value
if y %2 == 0: # If y is divisible by 2 with no denominator then
y = y / 2 # divide y by 2
print(int(y) , end =" ") # and print this new value (with no decimal point (int)) to screen with no carriage return, rather a space for next value
except ValueError: # The Try Exception means that the value entered is not an integer so
print("That is not an integer") # Print to screen it is not an integer and
quit() # finish.
| 87.4 | 200 | 0.494279 |
df98aae12eb9e2c90499328065dc3c12c78d49fb | 20,942 | py | Python | test/functional/feature_pruning.py | satcoin-dev/satcoin | a68f5965a8c28cfcaf8855a661ea3f15de9ae7d5 | [
"MIT"
] | 4 | 2021-02-28T04:34:58.000Z | 2021-09-14T15:25:31.000Z | test/functional/feature_pruning.py | satcoin-dev/satcoin | a68f5965a8c28cfcaf8855a661ea3f15de9ae7d5 | [
"MIT"
] | null | null | null | test/functional/feature_pruning.py | satcoin-dev/satcoin | a68f5965a8c28cfcaf8855a661ea3f15de9ae7d5 | [
"MIT"
] | 1 | 2021-06-18T13:13:17.000Z | 2021-06-18T13:13:17.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the pruning code.
WARNING:
This test uses 4GB of disk space.
This test takes 30 mins or more (up to 2 hours)
"""
import os
from test_framework.blocktools import create_coinbase
from test_framework.messages import CBlock, ToHex
from test_framework.script import CScript, OP_RETURN, OP_NOP
from test_framework.test_framework import SatcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
connect_nodes,
disconnect_nodes,
wait_until,
)
# Rescans start at the earliest block up to 2 hours before a key timestamp, so
# the manual prune RPC avoids pruning blocks in the same window to be
# compatible with pruning based on key creation time.
TIMESTAMP_WINDOW = 2 * 60 * 60
def mine_large_blocks(node, n):
    """Mine n large blocks on top of node's current tip.

    Each block's coinbase carries a ~950k OP_NOP OP_RETURN scriptPubKey, which
    would be non-standard in a normal transaction but is consensus valid in a
    coinbase.
    """
    # Set the nTime if this is the first time this function has been called.
    # A static variable ensures that time is monotonically increasing and is
    # therefore different for each block created => blockhash is unique.
    # Bug fix: the guard previously tested for "nTimes" (which is never set),
    # so nTime was reset to 0 on every call and only recovered via the max()
    # against the tip time below.
    if "nTime" not in mine_large_blocks.__dict__:
        mine_large_blocks.nTime = 0

    # Get the block parameters for the first block
    big_script = CScript([OP_RETURN] + [OP_NOP] * 950000)
    best_block = node.getblock(node.getbestblockhash())
    height = int(best_block["height"]) + 1
    mine_large_blocks.nTime = max(mine_large_blocks.nTime, int(best_block["time"])) + 1
    previousblockhash = int(best_block["hash"], 16)

    for _ in range(n):
        # Build the coinbase transaction (with large scriptPubKey)
        coinbase_tx = create_coinbase(height)
        coinbase_tx.vin[0].nSequence = 2 ** 32 - 1
        coinbase_tx.vout[0].scriptPubKey = big_script
        coinbase_tx.rehash()

        # Build the block
        block = CBlock()
        block.nVersion = best_block["version"]
        block.hashPrevBlock = previousblockhash
        block.nTime = mine_large_blocks.nTime
        block.nBits = int('207fffff', 16)
        block.nNonce = 0
        block.vtx = [coinbase_tx]
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        # Submit to the node
        node.submitblock(ToHex(block))

        previousblockhash = block.sha256
        height += 1
        mine_large_blocks.nTime += 1
def calc_usage(blockdir):
    """Return the total size in MiB of the regular files directly inside *blockdir*.

    *blockdir* is expected to end with a path separator, since each entry name is
    appended to it by plain string concatenation.
    """
    total_bytes = 0
    for entry in os.listdir(blockdir):
        if os.path.isfile(os.path.join(blockdir, entry)):
            total_bytes += os.path.getsize(blockdir + entry)
    return total_bytes / (1024. * 1024.)
class PruneTest(SatcoinTestFramework):
    def set_test_params(self):
        """Configure chain, node count and per-node startup arguments."""
        self.setup_clean_chain = True
        self.num_nodes = 6
        self.supports_cli = False

        # Create nodes 0 and 1 to mine.
        # Create node 2 to test pruning.
        self.full_node_default_args = ["-maxreceivebuffer=20000", "-checkblocks=5"]
        # Create nodes 3 and 4 to test manual pruning (they will be re-started with manual pruning later)
        # Create nodes 5 to test wallet in prune mode, but do not connect
        self.extra_args = [
            self.full_node_default_args,
            self.full_node_default_args,
            ["-maxreceivebuffer=20000", "-prune=550"],
            ["-maxreceivebuffer=20000"],
            ["-maxreceivebuffer=20000"],
            ["-prune=550"],
        ]
        self.rpc_timeout = 120
    def skip_test_if_missing_module(self):
        """Skip the whole test when the build has no wallet support."""
        self.skip_if_no_wallet()
    def setup_network(self):
        """Start nodes and wire the mining/pruning topology.

        Node 5 (pruned wallet node) is deliberately left unconnected.
        """
        self.setup_nodes()

        # Block-file directory of the pruning node; trailing '' keeps a path separator
        # at the end so calc_usage() can concatenate file names onto it.
        self.prunedir = os.path.join(self.nodes[2].datadir, self.chain, 'blocks', '')

        connect_nodes(self.nodes[0], 1)
        connect_nodes(self.nodes[1], 2)
        connect_nodes(self.nodes[0], 2)
        connect_nodes(self.nodes[0], 3)
        connect_nodes(self.nodes[0], 4)
        self.sync_blocks(self.nodes[0:5])
    def setup_nodes(self):
        """Start all nodes and import each node's deterministic coinbase key."""
        self.add_nodes(self.num_nodes, self.extra_args)
        self.start_nodes()
        for n in self.nodes:
            # rescan=False: the wallets are empty at this point, so rescanning is wasted work
            n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase', rescan=False)
    def create_big_chain(self):
        """Mine a chain large enough (>550 MiB of block data) to trigger pruning."""
        # Start by creating some coinbases we can spend later
        self.nodes[1].generate(200)
        self.sync_blocks(self.nodes[0:2])
        self.nodes[0].generate(150)

        # Then mine enough full blocks to create more than 550MiB of data
        mine_large_blocks(self.nodes[0], 645)

        self.sync_blocks(self.nodes[0:5])
    def test_height_min(self):
        """Check the first block file survives until enough new blocks force pruning."""
        assert os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), "blk00000.dat is missing, pruning too early"
        self.log.info("Success")
        self.log.info("Though we're already using more than 550MiB, current usage: %d" % calc_usage(self.prunedir))
        self.log.info("Mining 25 more blocks should cause the first block file to be pruned")
        # Pruning doesn't run until we're allocating another chunk, 20 full blocks past the height cutoff will ensure this
        mine_large_blocks(self.nodes[0], 25)

        # Wait for blk00000.dat to be pruned
        wait_until(lambda: not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), timeout=30)

        self.log.info("Success")
        usage = calc_usage(self.prunedir)
        self.log.info("Usage should be below target: %d" % usage)
        assert_greater_than(550, usage)
    def create_chain_with_staleblocks(self):
        """Repeatedly mine competing side-chains so block files fill with stale data."""
        # Create stale blocks in manageable sized chunks
        self.log.info("Mine 24 (stale) blocks on Node 1, followed by 25 (main chain) block reorg from Node 0, for 12 rounds")

        for j in range(12):
            # Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain
            # Node 2 stays connected, so it hears about the stale blocks and then reorg's when node0 reconnects
            disconnect_nodes(self.nodes[0], 1)
            disconnect_nodes(self.nodes[0], 2)
            # Mine 24 blocks in node 1
            mine_large_blocks(self.nodes[1], 24)

            # Reorg back with 25 block chain from node 0
            mine_large_blocks(self.nodes[0], 25)

            # Create connections in the order so both nodes can see the reorg at the same time
            connect_nodes(self.nodes[0], 1)
            connect_nodes(self.nodes[0], 2)
            self.sync_blocks(self.nodes[0:3])

        self.log.info("Usage can be over target because of high stale rate: %d" % calc_usage(self.prunedir))
def reorg_test(self):
    """Force a deep reorg: node 1 invalidates back to 287 blocks below the
    tip (walking past its own stale 24-block fork), mines a new 300-block
    chain while disconnected, then reconnects so node 2 must reorg across
    288 blocks of undo data. Afterwards mines 220 more blocks and checks
    prune usage is back under target."""
    # Node 1 will mine a 300 block chain starting 287 blocks back from Node 0 and Node 2's tip
    # This will cause Node 2 to do a reorg requiring 288 blocks of undo data to the reorg_test chain
    height = self.nodes[1].getblockcount()
    self.log.info("Current block height: %d" % height)
    self.forkheight = height - 287
    self.forkhash = self.nodes[1].getblockhash(self.forkheight)
    self.log.info("Invalidating block %s at height %d" % (self.forkhash, self.forkheight))
    self.nodes[1].invalidateblock(self.forkhash)
    # We've now switched to our previously mined-24 block fork on node 1, but that's not what we want
    # So invalidate that fork as well, until we're on the same chain as node 0/2 (but at an ancestor 288 blocks ago)
    mainchainhash = self.nodes[0].getblockhash(self.forkheight - 1)
    curhash = self.nodes[1].getblockhash(self.forkheight - 1)
    while curhash != mainchainhash:
        self.nodes[1].invalidateblock(curhash)
        curhash = self.nodes[1].getblockhash(self.forkheight - 1)
    assert self.nodes[1].getblockcount() == self.forkheight - 1
    self.log.info("New best height: %d" % self.nodes[1].getblockcount())
    # Disconnect node1 and generate the new chain
    disconnect_nodes(self.nodes[0], 1)
    disconnect_nodes(self.nodes[1], 2)
    self.log.info("Generating new longer chain of 300 more blocks")
    self.nodes[1].generate(300)
    self.log.info("Reconnect nodes")
    connect_nodes(self.nodes[0], 1)
    connect_nodes(self.nodes[1], 2)
    self.sync_blocks(self.nodes[0:3], timeout=120)
    self.log.info("Verify height on node 2: %d" % self.nodes[2].getblockcount())
    self.log.info("Usage possibly still high because of stale blocks in block files: %d" % calc_usage(self.prunedir))
    self.log.info("Mine 220 more large blocks so we have requisite history")
    mine_large_blocks(self.nodes[0], 220)
    self.sync_blocks(self.nodes[0:3], timeout=120)
    usage = calc_usage(self.prunedir)
    self.log.info("Usage should be below target: %d" % usage)
    assert_greater_than(550, usage)
def reorg_back(self):
    """Check that a pruned node (node 2) can reorg back to the original
    main chain, re-downloading blocks it had pruned. Uses node 0 (which
    kept all block data) to serve the old chain after both invalidate the
    reorg_test chain."""
    # Verify that a block on the old main chain fork has been pruned away
    assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
    with self.nodes[2].assert_debug_log(expected_msgs=['block verification stopping at height', '(pruning, no data)']):
        self.nodes[2].verifychain(checklevel=4, nblocks=0)
    self.log.info("Will need to redownload block %d" % self.forkheight)
    # Verify that we have enough history to reorg back to the fork point
    # Although this is more than 288 blocks, because this chain was written more recently
    # and only its other 299 small and 220 large blocks are in the block files after it,
    # it is expected to still be retained
    self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight))
    first_reorg_height = self.nodes[2].getblockcount()
    curchainhash = self.nodes[2].getblockhash(self.mainchainheight)
    self.nodes[2].invalidateblock(curchainhash)
    goalbestheight = self.mainchainheight
    goalbesthash = self.mainchainhash2
    # As of 0.10 the current block download logic is not able to reorg to the original chain created in
    # create_chain_with_stale_blocks because it doesn't know of any peer that's on that chain from which to
    # redownload its missing blocks.
    # Invalidate the reorg_test chain in node 0 as well, it can successfully switch to the original chain
    # because it has all the block data.
    # However it must mine enough blocks to have a more work chain than the reorg_test chain in order
    # to trigger node 2's block download logic.
    # At this point node 2 is within 288 blocks of the fork point so it will preserve its ability to reorg
    if self.nodes[2].getblockcount() < self.mainchainheight:
        blocks_to_mine = first_reorg_height + 1 - self.mainchainheight
        self.log.info("Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed: %d" % blocks_to_mine)
        self.nodes[0].invalidateblock(curchainhash)
        assert_equal(self.nodes[0].getblockcount(), self.mainchainheight)
        assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2)
        goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1]
        goalbestheight = first_reorg_height + 1
    self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
    # Wait for Node 2 to reorg to proper height
    wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight, timeout=900)
    assert_equal(self.nodes[2].getbestblockhash(), goalbesthash)
    # Verify we can now have the data for a block previously pruned
    assert_equal(self.nodes[2].getblock(self.forkhash)["height"], self.forkheight)
def manual_test(self, node_number, use_timestamp):
    """Exercise the pruneblockchain RPC on a node restarted in manual prune
    mode (-prune=1).

    Checks that pruning is refused outside prune mode and below
    PruneAfterHeight, that pruning by height (or by block timestamp when
    use_timestamp is True) removes exactly the expected blk*.dat files,
    and that the node then restarts cleanly with -prune=550.
    """
    # at this point, node has 995 blocks and has not yet run in prune mode
    self.start_node(node_number)
    node = self.nodes[node_number]
    assert_equal(node.getblockcount(), 995)
    assert_raises_rpc_error(-1, "not in prune mode", node.pruneblockchain, 500)
    # now re-start in manual pruning mode
    self.stop_node(node_number)
    self.start_node(node_number, extra_args=["-prune=1"])
    node = self.nodes[node_number]
    assert_equal(node.getblockcount(), 995)

    def height(index):
        # Translate a block height into the prune target: either the raw
        # height, or the block's timestamp shifted by TIMESTAMP_WINDOW.
        if use_timestamp:
            return node.getblockheader(node.getblockhash(index))["time"] + TIMESTAMP_WINDOW
        else:
            return index

    def prune(index):
        # Prune up to `index` and check the RPC reports the same pruneheight
        # as getblockchaininfo.
        ret = node.pruneblockchain(height=height(index))
        assert_equal(ret, node.getblockchaininfo()['pruneheight'])

    def has_block(index):
        # True if blkNNNNN.dat still exists on disk for this node.
        return os.path.isfile(os.path.join(self.nodes[node_number].datadir, self.chain, "blocks", "blk{:05}.dat".format(index)))

    # should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
    assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
    # Save block transaction count before pruning, assert value
    block1_details = node.getblock(node.getblockhash(1))
    assert_equal(block1_details["nTx"], len(block1_details["tx"]))
    # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
    node.generate(6)
    assert_equal(node.getblockchaininfo()["blocks"], 1001)
    # Pruned block should still know the number of transactions
    assert_equal(node.getblockheader(node.getblockhash(1))["nTx"], block1_details["nTx"])
    # negative heights should raise an exception
    assert_raises_rpc_error(-8, "Negative", node.pruneblockchain, -10)
    # height=100 too low to prune first block file so this is a no-op
    prune(100)
    assert has_block(0), "blk00000.dat is missing when should still be there"
    # Does nothing
    node.pruneblockchain(height(0))
    assert has_block(0), "blk00000.dat is missing when should still be there"
    # height=500 should prune first file
    prune(500)
    assert not has_block(0), "blk00000.dat is still there, should be pruned by now"
    assert has_block(1), "blk00001.dat is missing when should still be there"
    # height=650 should prune second file
    prune(650)
    assert not has_block(1), "blk00001.dat is still there, should be pruned by now"
    # height=1000 should not prune anything more, because tip-288 is in blk00002.dat.
    prune(1000)
    # BUG FIX: this assertion checks that blk00002.dat is STILL PRESENT, but
    # the old failure message claimed the opposite ("should be pruned by now").
    assert has_block(2), "blk00002.dat is missing when should still be there"
    # advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat)
    node.generate(288)
    prune(1000)
    assert not has_block(2), "blk00002.dat is still there, should be pruned by now"
    assert not has_block(3), "blk00003.dat is still there, should be pruned by now"
    # stop node, start back up with auto-prune at 550 MiB, make sure still runs
    self.stop_node(node_number)
    self.start_node(node_number, extra_args=["-prune=550"])
    self.log.info("Success")
def wallet_test(self):
    """Check the pruning node's wallet survives a restart (triggering a
    rescan), and that a freshly synced node (node 5) restarts cleanly in
    prune mode after IBD (regression check for the issue referenced
    below)."""
    # check that the pruning node's wallet is still in good shape
    self.log.info("Stop and start pruning node to trigger wallet rescan")
    self.stop_node(2)
    self.start_node(2, extra_args=["-prune=550"])
    self.log.info("Success")
    # check that wallet loads successfully when restarting a pruned node after IBD.
    # this was reported to fail in #7494.
    self.log.info("Syncing node 5 to test wallet")
    connect_nodes(self.nodes[0], 5)
    nds = [self.nodes[0], self.nodes[5]]
    self.sync_blocks(nds, wait=5, timeout=300)
    self.stop_node(5)  # stop and start to trigger rescan
    self.start_node(5, extra_args=["-prune=550"])
    self.log.info("Success")
def run_test(self):
    """Top-level orchestration: build a large chain, then run the pruning
    sub-tests in order (height cutoff, stale-block usage, deep reorg,
    reorg-back with redownload, manual pruning by height and by timestamp,
    and wallet rescan)."""
    self.log.info("Warning! This test requires 4GB of disk space")

    self.log.info("Mining a big blockchain of 995 blocks")
    self.create_big_chain()
    # Chain diagram key:
    # *   blocks on main chain
    # +,&,$,@ blocks on other forks
    # X   invalidated block
    # N1  Node 1
    #
    # Start by mining a simple chain that all nodes have
    # N0=N1=N2 **...*(995)

    # stop manual-pruning node with 995 blocks
    self.stop_node(3)
    self.stop_node(4)

    self.log.info("Check that we haven't started pruning yet because we're below PruneAfterHeight")
    self.test_height_min()
    # Extend this chain past the PruneAfterHeight
    # N0=N1=N2 **...*(1020)

    self.log.info("Check that we'll exceed disk space target if we have a very high stale block rate")
    self.create_chain_with_staleblocks()
    # Disconnect N0
    # And mine a 24 block chain on N1 and a separate 25 block chain on N0
    # N1=N2 **...*+...+(1044)
    # N0    **...**...**(1045)
    #
    # reconnect nodes causing reorg on N1 and N2
    # N1=N2 **...*(1020) *...**(1045)
    #                   \
    #                    +...+(1044)
    #
    # repeat this process until you have 12 stale forks hanging off the
    # main chain on N1 and N2
    # N0    *************************...***************************(1320)
    #
    # N1=N2 **...*(1020) *...**(1045) *..         ..**(1295) *...**(1320)
    #                   \            \                      \
    #                    +...+(1044)  &..                    $...$(1319)

    # Save some current chain state for later use
    self.mainchainheight = self.nodes[2].getblockcount()  # 1320
    self.mainchainhash2 = self.nodes[2].getblockhash(self.mainchainheight)

    self.log.info("Check that we can survive a 288 block reorg still")
    self.reorg_test()  # (1033, )
    # Now create a 288 block reorg by mining a longer chain on N1
    # First disconnect N1
    # Then invalidate 1033 on main chain and 1032 on fork so height is 1032 on main chain
    # N1   **...*(1020) **...**(1032)X..
    #                  \
    #                   ++...+(1031)X..
    #
    # Now mine 300 more blocks on N1
    # N1    **...*(1020) **...**(1032) @@...@(1332)
    #                   \            \
    #                    \            X...
    #                     \            \
    #                      ++...+(1031)X..            ..
    #
    # Reconnect nodes and mine 220 more blocks on N1
    # N1    **...*(1020) **...**(1032) @@...@@@(1552)
    #                   \            \
    #                    \            X...
    #                     \            \
    #                      ++...+(1031)X..            ..
    #
    # N2    **...*(1020) **...**(1032) @@...@@@(1552)
    #                   \            \
    #                    \            *...**(1320)
    #                     \            \
    #                      ++...++(1044)  ..
    #
    # N0    ********************(1032) @@...@@@(1552)
    #                                 \
    #                                  *...**(1320)

    self.log.info("Test that we can rerequest a block we previously pruned if needed for a reorg")
    self.reorg_back()
    # Verify that N2 still has block 1033 on current chain (@), but not on main chain (*)
    # Invalidate 1033 on current chain (@) on N2 and we should be able to reorg to
    # original main chain (*), but will require redownload of some blocks
    # In order to have a peer we think we can download from, must also perform this invalidation
    # on N0 and mine a new longest chain to trigger.
    # Final result:
    # N0    ********************(1032) **...****(1553)
    #                                 \
    #                                  X@...@@@(1552)
    #
    # N2    **...*(1020) **...**(1032) **...****(1553)
    #                   \            \
    #                    \            X@...@@@(1552)
    #                     \
    #                      +..
    #
    # N1 doesn't change because 1033 on main chain (*) is invalid

    self.log.info("Test manual pruning with block indices")
    self.manual_test(3, use_timestamp=False)

    self.log.info("Test manual pruning with timestamps")
    self.manual_test(4, use_timestamp=True)

    self.log.info("Test wallet re-scan")
    self.wallet_test()

    self.log.info("Done")
# Entry point: run the full pruning test when executed directly.
if __name__ == '__main__':
    PruneTest().main()
| 45.133621 | 140 | 0.612692 |
64efb85c349bc2ebd6567c17f7b64401e5228d72 | 24,505 | py | Python | src/extractor.py | Fireronin/TLoB | 86a2c6d119a0746ece7a9afd179777edc67c4f96 | [
"MIT"
] | null | null | null | src/extractor.py | Fireronin/TLoB | 86a2c6d119a0746ece7a9afd179777edc67c4f96 | [
"MIT"
] | null | null | null | src/extractor.py | Fireronin/TLoB | 86a2c6d119a0746ece7a9afd179777edc67c4f96 | [
"MIT"
] | null | null | null | from copy import deepcopy
from dataclasses import dataclass
import enum
from typing import Dict, List, Tuple, Union
from lark import Lark, Transformer, v_args
import os
from sympy import EX, arg
# Load a sample tcl type dump used by the __main__ smoke test at the bottom
# of this file.
with open(os.path.join(os.path.join(os.path.dirname(__file__),"..","grammar","tests"), "testFunc.json")) as f:
    example_text = f.read()
# with open(os.path.join(os.path.join(os.path.dirname(__file__),"..","grammar","tests"), "funcs.json")) as f:
#     example_text = f.read()
#region class definitions
class Position:
    """A source-code location (file, line, column) as reported in the dump."""

    def __init__(self, file, line, column) -> None:
        self.file = file
        self.line = line
        self.column = column

    def _describe(self) -> str:
        # Shared rendering used by both __str__ and __repr__.
        return f"{self.file}:{self.line}:{self.column}"

    def __str__(self) -> str:
        return self._describe()

    def __repr__(self) -> str:
        return self._describe()
class Type_ide:
    """Identifier of a (possibly parameterised) type: a name, the package it
    lives in, and the list of formal parameters it is applied to."""
    name: str

    def __init__(self, name, package=None, formals=None, is_polymorphic=False, is_primary=False, used_name=None) -> None:
        # BUG FIX: `formals` previously defaulted to a shared mutable list
        # ([]); every default-constructed Type_ide aliased the same object.
        self.name = name
        self.used_name = used_name
        self.package = package
        self.formals = [] if formals is None else formals
        self.is_polymorphic = is_polymorphic
        self.is_primary = is_primary

    def __getitem__(self, key):
        """Return the formal whose .name matches `key` (None if absent)."""
        for i in range(len(self.formals)):
            if self.formals[i].name == key:
                # BUG FIX: previously returned self.fields[i], but Type_ide
                # has no `fields` attribute (that lives on Type) — it always
                # raised AttributeError. Return the matching formal instead.
                return self.formals[i]

    def populate(self, other):
        """Copy identity fields from `other`, unless it is the sentinel
        Type_ide named "nothing" (used by the grammar for empty slots)."""
        if other.name == "nothing":
            return
        self.name = other.name
        self.used_name = other.used_name
        self.package = other.package
        self.formals = other.formals
        self.is_polymorphic = other.is_polymorphic
        self.is_primary = other.is_primary

    @property
    def children(self):
        # The type_ide of each formal parameter.
        return [f.type_ide for f in self.formals]

    def __str__(self) -> str:
        return f"{self.package}::{self.name}"

    def __repr__(self) -> str:
        return "Type_ide " + self.__str__()

    @property
    def full_name(self) -> str:
        return f"{self.package}::{self.name}"
class Value(Type_ide):
    """A literal value appearing in a type position (e.g. a numeric or string
    type parameter)."""
    is_string: bool

    def __init__(self, value) -> None:
        # BUG FIX: the old code called super().__init__(self, value); since
        # super() already binds self, that passed name=<this instance> and
        # package=value. Use the value itself as the type name.
        super().__init__(value)
        self.value = value
        # Robustness: is_string is now always defined (it used to be set
        # only when the value was a str, so reading it for other values
        # raised AttributeError).
        self.is_string = isinstance(value, str)

    def __str__(self) -> str:
        return f"{self.value}"

    def __repr__(self) -> str:
        return "Value " + f"{self.value}"
class Type:
    """Base class for all extracted type descriptions: name, package, field
    list, source position and a couple of flags shared by subclasses."""
    position: Position

    def __init__(self, name, package=None, position=None, fields=None) -> None:
        self.name = name
        self.package = package
        self.fields = fields
        self.position = position
        self.primary = False
        self.width = None

    def __eq__(self, other) -> bool:
        # BUG FIX: comparing with a non-Type used to raise AttributeError
        # (other.name on an arbitrary object). Defer to the other operand.
        if not isinstance(other, Type):
            return NotImplemented
        if self.name != other.name:
            return False
        if self.package != other.package:
            return False
        if self.fields != other.fields:
            return False
        return True

    def __str__(self) -> str:
        return f"{self.package}::{self.name}"

    def __repr__(self) -> str:
        # Robustness: fields defaults to None, which used to crash the
        # join; render an empty parameter list in that case.
        params = ','.join([str(f) for f in self.fields]) if self.fields else ''
        return "Type " + f"{self.package}::{self.name} ({params})"

    @property
    def full_name(self) -> str:
        return f"{self.package}::{self.name}"
class Interface(Type):
    """An interface declaration: a type identifier plus its member map."""

    def __init__(self, type_ide, members, position=None, attributes=None) -> None:
        super().__init__(type_ide.name, type_ide.package, position)
        self.type_ide = type_ide
        self.members = members
        self.attributes = attributes

    def __str__(self) -> str:
        return str(self.type_ide)

    def __repr__(self) -> str:
        return "Interface " + str(self)

    @property
    def full_name(self) -> str:
        return str(self.type_ide)
class Interface_method:
    """Signature of one interface method: result type, input types, ports."""

    def __init__(self, name, type, input_types, ports) -> None:
        # Plain record: keep all four attributes exactly as given.
        self.name, self.type = name, type
        self.input_types, self.ports = input_types, ports
class Enum(Type):
    """An enum type: a member list and an optional bit width; always primary."""

    def __init__(self, type_ide, members, width=None, position=None) -> None:
        super().__init__(type_ide.name, type_ide.package, position)
        self.type_ide = type_ide
        self.members = members
        self.width = width
        self.primary = True  # enums are always primary types

    def __str__(self) -> str:
        return str(self.type_ide)

    def __repr__(self) -> str:
        return "Enum " + str(self)

    @property
    def full_name(self) -> str:
        return str(self.type_ide)
class Struct(Type):
    """A struct or tagged-union type: member map, optional per-member widths
    and total width, and flags for polymorphism / tagged-union-ness."""
    is_tagged_union = False
    is_polymorphic: bool
    type_ide: Type_ide
    members: Dict[str, Type]
    position: Position

    def __init__(self, type_ide: Type_ide, members, position=None, is_polymorphic=False, width=None, widths=None, is_tagged_union=False) -> None:
        super().__init__(type_ide.name, type_ide.package, position)
        self.type_ide = type_ide
        self.members = members
        self.is_polymorphic = is_polymorphic
        # BUG FIX: `widths` previously defaulted to a shared mutable dict
        # ({}); all default-constructed Structs aliased the same object.
        self.widths = {} if widths is None else widths
        self.width = width
        self.is_tagged_union = is_tagged_union

    def __str__(self) -> str:
        return f"{self.type_ide}"

    def __repr__(self) -> str:
        return "Struct " + f"{self.type_ide}"

    @property
    def full_name(self) -> str:
        return f"{self.type_ide}"
#list or vector
class GetItemTypes(Type):
    """A container type (List or Vector): an element type plus an optional
    length (Vectors have one, Lists do not)."""
    type_ide: Type_ide

    def __init__(self, type_ide: Type_ide, elem, length=None) -> None:
        super().__init__(type_ide.name, type_ide.package)
        self.type_ide = type_ide
        self.elem = elem
        self.length = length

    def __str__(self) -> str:
        return str(self.type_ide)

    def __repr__(self) -> str:
        return str(self.type_ide)

    @property
    def full_name(self) -> str:
        return str(self.type_ide)
class Alias:
    """A typedef alias: binds a new type identifier to an existing type."""
    type_ide: Type_ide
    type: Type
    position: Position

    def __init__(self, type_ide: Type_ide, type, position: Position) -> None:
        # NOTE: `name` intentionally holds the Type_ide object (not a str);
        # __str__ relies on Type_ide's own rendering.
        self.name = type_ide
        self.type_ide = type_ide
        self.type = type
        self.position = position

    def __str__(self) -> str:
        return str(self.name)

    def __repr__(self) -> str:
        return f"alias {self.name} {self.type}"

    @property
    def full_name(self) -> str:
        return str(self)
class Type_formal:
    """A formal type parameter, optionally tagged as a type and/or numeric
    formal; may also be flagged as a module formal."""
    is_module: bool = False
    type_ide: Type_ide

    def __init__(self, name, type_tag=False, numeric_tag=False) -> None:
        # The positional argument becomes the formal's type identifier.
        self.type_ide = name
        self.type_tag = type_tag
        self.numeric_tag = numeric_tag

    def __str__(self) -> str:
        return str(self.type_ide)

    def __repr__(self) -> str:
        return "T_formal " + str(self)
class Module(Type):
    """A module declaration: the interface it provides, its arguments and
    its provisos."""
    type_ide: Type_ide

    def __init__(self, name, interface, package=None, position=None, arguments=None, provisos=None) -> None:
        Type.__init__(self, name=name, package=package, position=position)
        self.interface = interface
        # BUG FIX: `arguments` and `provisos` previously used shared mutable
        # defaults ({} and []); use fresh per-instance containers.
        self.arguments = {} if arguments is None else arguments
        self.provisos = [] if provisos is None else provisos
        # A module's "type identifier" is the interface it provides.
        self.type_ide = interface

    def __str__(self) -> str:
        return super().__str__() + f" {self.interface}"

    def __repr__(self) -> str:
        return "Module " + self.__str__()

    @property
    def full_name(self) -> str:
        return f"{self.package}::{self.name}"
class Function(Type):
    """A function declaration: result type, argument map and provisos."""
    type_ide: Type_ide  # result type
    # BUG FIX: `arguments` was a shared mutable class attribute (= {});
    # keep only the annotation — __init__ always sets the instance attribute.
    arguments: Dict[str, Type]

    def __init__(self, name, package=None, arguments=None, result=None, provisos=None, position=None, argument_names=None) -> None:
        # `argument_names` is accepted for interface compatibility but unused.
        Type.__init__(self, name=name, package=package, position=position)
        self.result = result
        self.type_ide = result
        # BUG FIX: `arguments`/`provisos` previously used shared mutable
        # defaults ({} and []).
        self.provisos = [] if provisos is None else provisos
        self.arguments = {} if arguments is None else arguments

    def __str__(self) -> str:
        return f"{self.type_ide} ({','.join([str(aa) for aa in self.arguments.values()])})"

    def __repr__(self) -> str:
        return "Function " + self.__str__()

    @property
    def full_name(self) -> str:
        return f"{self.package}::{self.name}"
class Proviso(Type):
    """A proviso (typeclass constraint) attached to a function or module."""
    type_ide: Type_ide

    def __init__(self, type_ide: Type_ide, position=None) -> None:
        Type.__init__(self, type_ide.name, type_ide.package, position)
        self.type_ide = type_ide

    def __str__(self) -> str:
        return str(self.type_ide)

    def __repr__(self) -> str:
        return "Proviso " + str(self)

    @property
    def full_name(self) -> str:
        return str(self.type_ide)
class Typeclass_instance():
    """One instance of a typeclass: the instantiated type plus its provisos.
    Supports [0]/[1] subscripting for (type, provisos) tuple-style access."""
    type_ide: Type_ide

    def __init__(self, t_type, provisos=None) -> None:
        # BUG FIX: `provisos` previously used a shared mutable default ([]).
        self.t_type = t_type
        self.provisos = [] if provisos is None else provisos
        self.type_ide = t_type
        self.inputs = t_type.formals

    def __getitem__(self, index):
        # Tuple-like access: [0] -> instantiated type, [1] -> provisos.
        if index == 0:
            return self.t_type
        if index == 1:
            return self.provisos
        raise IndexError(f"Typeclass instance index out of range: {index}")

    def __str__(self) -> str:
        return f"{self.inputs} {self.provisos}"

    def __repr__(self) -> str:
        return "T_classInstance " + self.__str__()
class Typeclass():
    """A typeclass: identifier, members, superclasses, functional
    dependencies, and its known instances."""
    type_ide: Type_ide
    position: Position
    instances: List[Typeclass_instance]

    def __init__(self, type_ide, position=None, members=None, superclasses=None, dependencies=None, instances=None) -> None:
        self.type_ide = type_ide
        self.position = position
        self.members = members
        self.superclasses = superclasses
        self.dependencies = dependencies
        self.instances = instances

    def __str__(self) -> str:
        return str(self.type_ide)

    def __repr__(self) -> str:
        return "T_class " + str(self)

    @property
    def full_name(self) -> str:
        return f"{self.type_ide.package}::{self.type_ide.name}"
#endregion
# transformer
class ModuleTransformer(Transformer):
    """Lark Transformer that turns parse trees of the type.lark grammar into
    the Type/Type_ide object model defined above.

    Each method name matches a grammar rule; `args` is the list of already
    transformed children for that rule. Many methods first drop None entries
    (placeholders produced by optional grammar parts) and then dispatch on
    the positional/tagged-tuple shape of the remaining children. Several
    intermediate results are passed upward as ("tag", payload...) tuples.
    """
    #region Typeclass work in progress
    def tcl_typeclass(self, args):
        """Assemble a Typeclass from its optional tagged sub-results; the
        last child is always the source position."""
        args = [x for x in args if x is not None]
        type_ide = args[0]
        # find in args ("superclasses",x)
        superclasses = None
        for i, arg in enumerate(args):
            if type(arg)==tuple and isinstance(arg[0],str) and arg[0] == "superclasses":
                superclasses = args[i][1]
                break
        # find in args ("dependencies",x)
        dependencies = None
        for i, arg in enumerate(args):
            if type(arg)==tuple and isinstance(arg[0],str) and arg[0] == "dependencies":
                dependencies = args[i][1]
                break
        # find in args ("members",x)
        members = None
        for i, arg in enumerate(args):
            if type(arg)==tuple and isinstance(arg[0],str) and arg[0] == "members":
                members = args[i][1]
                break
        # find in args ("instances",x)
        instances = None
        for i, arg in enumerate(args):
            if type(arg)==tuple and isinstance(arg[0],str) and arg[0] == "instances":
                instances = args[i][1]
                break
        return Typeclass(type_ide,members=members,superclasses=superclasses,dependencies=dependencies,instances=instances,position=args[-1])
    def tcl_tc_superclasses(self, args):
        return ("superclasses",args)
    def tcl_tc_dependencies(self, args):
        return ("dependencies",args)
    def tcl_tc_d_dependency(self, args):
        return (args[0],args[1])
    def tcl_tc_instances(self, args):
        return ("instances",args)
    def tcl_tc_i_instance(self, args):
        # An instance is a type plus optional provisos.
        args = [x for x in args if x is not None]
        if len(args) == 1:
            return Typeclass_instance(args[0],[])
        return Typeclass_instance(args[0], args[1])
    def tcl_tc_members(self, args):
        # Build {member_name: value-or-None} from tagged tuples / bare names.
        members = {}
        for arg in args:
            if type(arg)==tuple:
                members[arg[1]] = arg[2]
            else:
                members[arg] = None
        return ("members",members)
    def tcl_tc_m_value(self, args):
        # name value
        return ("memberValue",args[1],args[0])
    def tcl_tc_m_module(self, args):
        # Attach the optional provisos (middle child when present) to the
        # module object, then tag it with its member name (last child).
        name = args[-1]
        provisos = []
        if len(args) == 3:
            provisos = args[1]
        module = args[0]
        module.provisos = provisos
        return ("module",name,module)
    def tcl_tc_m_function(self, args):
        # Same shape as tcl_tc_m_module, but for a function member.
        name = args[-1]
        provisos = []
        if len(args) == 3:
            provisos = args[1]
        func = args[0]
        func.provisos = provisos
        return ("function",name,func)
    def tcl_provisos(self, args):
        provisos = [ Proviso(x) for x in args ]
        return ("provisos",provisos)
    def tcl_tc_m_f_function(self, args):
        """Build a Function member signature: args[0]=result, args[1]=name,
        remaining children are arguments. NOTE: any exception is printed and
        swallowed (returns None) — deliberate best-effort parsing."""
        try:
            arguments = {}
            for argument in args[2:]:
                if argument[0] =="TypeIdeAndName":
                    arguments[argument[2]] = argument[1]
                else:
                    arguments[argument[1].name] = argument[1]
            return Function(name=args[1],result=args[0],arguments=arguments)
        except Exception as e:
            print(e)
            return None
    def tcl_tc_m_f_module(self, args):
        # NOTE(review): the `arguments` dict built here is never passed on —
        # Module receives the raw args[1:-1] slice instead. Looks suspicious;
        # confirm intent before changing.
        arguments = {}
        for argument in args[2:]:
            if issubclass(type(argument),Type):
                arguments[argument.name] = argument
            else:
                arguments[argument[1]] = argument[0]
        return Module(name=args[0],arguments=args[1:-1],interface=args[-1])
    def tcl_tc_m_f_argument(self, args):
        args = [x for x in args if x is not None]
        if len(args) == 2:
            return ("TypeIdeAndName",args[0],args[1])
        return ("functionOrModule",args[0])
    #endregion
    #region func and module
    # new func
    def string_input(self, args):
        # Strip the surrounding quote characters from a string token.
        return args[0][1:-1]
    def tcl_function(self, args):
        """Assemble a top-level Function from an ordered sequence of tagged
        tuples: optional ("package",...), then the name, then optional
        ("result",...), ("arguments",...), ("provisos",...), and finally the
        position. The cursor `it` walks the sequence."""
        try:
            args = [x for x in args if x is not None]
            package = None
            arguments = {}
            provisos = []
            result = None
            it = 0
            if type(args[it])==tuple and args[it][0] == "package":
                package = args[it][1]
                it+=1
            function_name = args[it]
            it+=1
            if type(args[it])==tuple and args[it][0] == "result":
                result = args[it][1]
                it+=1
            if type(args[it])==tuple and args[it][0] == "arguments":
                arguments = args[it][1]
                it+=1
            if type(args[it])==tuple and args[it][0] == "provisos":
                provisos = args[it][1]
                it+=1
            return Function(name=function_name,package=package,arguments=arguments,result=result,provisos=provisos,position=args[it])
        except Exception as e:
            print(e)
            print(args)
            raise e
    def package_name(self, args):
        return (args[0],args[1])
    def name_only(self, args):
        return (None,args[0])
    def tcl_f_result(self, args):
        # Two children means (package, type): attach the package to the type.
        args = [x for x in args if x is not None]
        if len(args) == 2:
            args[1].package = args[0]
            return args[1]
        return ("result",args[0])
    def tcl_f_arguments(self, args):
        # Arguments are keyed by position index.
        arguments = {}
        for i, argument in enumerate(args):
            arguments[i] = argument
        return ("arguments",arguments)
    def tcl_fa_argument(self, args):
        return args[0]
    # module
    def tcl_module(self, args):
        """Assemble a top-level Module; same cursor-walk shape as
        tcl_function, with a mandatory ("interface",...) after the name."""
        try:
            args = [x for x in args if x is not None]
            package = None
            interface = None
            arguments = {}
            provisos = []
            position = None
            it = 0
            if type(args[it])==tuple and args[it][0] == "package":
                package = args[it][1]
                it+=1
            module_name = args[it]
            it+=1
            interface = args[it][1]
            it+=1
            if type(args[it])==tuple and args[it][0] == "arguments":
                arguments = args[it][1]
                it+=1
            if type(args[it])==tuple and args[it][0] == "provisos":
                provisos = args[it][1]
                it+=1
            position = args[it]
            return Module(name=module_name,package=package,interface=interface,arguments=arguments,provisos=provisos,position=position)
        except Exception as e:
            print(e)
            print(args)
            raise e
    def tcl_m_interface(self, args):
        args = [x for x in args if x is not None]
        if len(args) == 2:
            args[1].package = args[0]
            return args[1]
        return ("interface",args[0])
    #endregion
    #region types
    def typeprimary(self, args):
        return args[0]
    def tcl_primary(self, args):
        args = [x for x in args if x is not None]
        #TODO: check what to do
        args[0].is_primary = True
        if len(args) == 2:
            args[0].width = args[1][1]
        return args[0]
    def tcl_width(self, args):
        return ("width",args[0])
    def tcl_enum(self, args):
        # NOTE: best-effort — any failure is printed and yields None.
        try:
            args = [x for x in args if x is not None]
            width = None
            if len(args) == 4:
                width = args[2][1]
            position = args[-1]
            return Enum(type_ide=args[0],members=args[1],width=width,position=position)
        except Exception as e:
            print(e)
            return None
    def tcl_alias(self, args):
        return Alias(type_ide=args[0],type=args[1],position=args[2])
    def tcl_interface_dec(self, args):
        # args[1] is a list of (name, member) pairs; fold it into a dict.
        args = [x for x in args if x is not None]
        attributes = None
        if len(args)==4:
            attributes = args[3]
        members_set = {args[1][x][0]:args[1][x][1] for x in range(len(args[1]))}
        return Interface(type_ide=args[0],members=members_set,position=args[2],attributes=attributes)
    def tcl_im_subinterface(self, args):
        return (args[1],Interface(type_ide=args[0],members=None))
    def tcl_im_method(self, args):
        return (args[1],Interface_method(name=args[1],type=args[0],input_types=None,ports=None))
    def struct(self, args):
        # args[1] is the ("members_widths", members, widths) triple from
        # tcl_stu_members.
        type_ide = args[0]
        members = args[1][1]
        widths = args[1][2]
        if len(args) == 4:
            width = args[2]
        else:
            width = None
        position = args[-1]
        return Struct(type_ide=type_ide,members=members,width=width,position=position,widths=widths)
    def tcl_tagged_union(self, args):
        # Same as struct() but flags the result as a tagged union.
        type_ide = args[0]
        members = args[1][1]
        widths = args[1][2]
        if len(args) == 4:
            width = args[2]
        else:
            width = None
        position = args[-1]
        return Struct(type_ide=type_ide,members=members,width=width,position=position,widths=widths,is_tagged_union=True)
    def poly_struct(self, args):
        # A polymorphic struct is an ordinary struct with the flag set.
        struct = self.struct(args)
        struct.is_polymorphic = True
        return struct
    def tcl_stu_members(self, args):
        # dict of members: each child is ("member", type, name, width)
        members = {}
        widths = {}
        for x in range(len(args)):
            members[args[x][2]] = args[x][1]
            widths[args[x][2]] = args[x][3]
        return ("members_widths",members,widths)
    def tcl_stu_member(self, args):
        width = None
        if type(args[-1]) == tuple and args[-1][0] == "width":
            width = args[-1][1]
        return ("member",args[0],args[1],width)
    def tcl_v_elem(self, args):
        return args[0]
    def tcl_v_length(self, args):
        return args[0]
    def tcl_vector(self, args):
        return GetItemTypes(type_ide=args[0],elem = args[2],length =args[1])
    def tcl_list(self, args):
        return GetItemTypes(type_ide=args[0],elem = args[1])
    # type_def_type
    def type_ide(self, args):
        return ("type_ide",args[0])
    def type_ide_poly(self, args):
        return ("type_ide_poly",args[0])
    def type_def_type_value(self, args):
        return Value(args[0])
    def type_def_type(self, args):
        # NOTE(review): if args[0] is neither a Value nor a tuple, `name`
        # is never bound and the return raises NameError — presumably the
        # grammar guarantees the tuple shape; confirm before relying on it.
        if type(args[0]) == Value:
            return args[0]
        args = [x for x in args if x is not None]
        poly = False
        package = None
        args_len = len(args)
        name_len = 1
        if type(args[0]) == tuple:
            if args[0][0] == "type_ide_poly":
                name = args[0][1]
                poly = True
            else:
                name = args[0][1]
                package = args[0][0]
        return Type_ide(name=name,package=package,formals=([] if len(args)==1 else args[name_len]),is_polymorphic=poly)
    def type_formal(self, args):
        return Type_formal(name=args[0])
    def module_type_formal(self, args):
        args[0].is_module = True
        return args[0]
    def type_type_formal(self, args):
        return Type_formal(name=args[0],type_tag=True)
    def numeric_type_formal(self, args):
        return Type_formal(name=args[0],numeric_tag=True,type_tag=True)
    #endregion
    # utility
    def tcl_type_full(self, args):
        return args[0]
    def package_name_solo(self, args):
        return ("package",args[0])
    #region old crap
    def tcl_position(self, args):
        return Position(args[0],args[1],args[2])
    def tcl_path(self, args):
        # remove None from args, this is to avoid bugs coming from parser
        args = [x for x in args if x is not None]
        return os.path.join(*args)#+".bsv"
    def tp_parametric(self, args):
        # remove None from args, this is to avoid bugs coming from parser
        args = [x for x in args if x is not None]
        # A lone lowercase/_/$ name is a type variable, returned verbatim.
        if len(args)==1 and type(args[0]) == str and (args[0][0].islower() or args[0][0] == "_" or args[0][0] == "$"):
            return args[0]
        ct = 0
        package = None
        if type(args[0]) == tuple:
            package = args[0][1]
            ct = 1
        name = args[ct]
        return Type(fields=args[ct+1:],name=name,package=package)
    def t_single(self, args):
        return args[0]
    def tcl_arguments(self, args):
        return args
    def tcl_interface_use(self, args):
        return Type(name="Interface",fields=args[1])
    def identifier_u(self, name):
        return name[0].value
    def identifier_l(self, name):
        return name[0].value
    def string_placeholder(self, name):
        return name[0]
    def int_value(self, number):
        return int(number[0])
    def NUMBER(self, number):
        return int(number.value)
    def list_of(self, args):
        return args
    def tcl_polymorphic(self, args):
        return (args[0],"polyTAG")
    #endregion
# Module-level parsers; populated by initalize_parser() before any of the
# parse_* helpers below may be used.
parser: Lark = None
typeParser: Lark = None
def initalize_parser(start: str="start"):
    """(Re)build the module-level Lark parsers from grammar/type.lark.

    `parser` uses the given start rule; `typeParser` always starts at
    "type". Returns the main parser. (Name keeps its historical typo for
    backward compatibility.)
    """
    global parser
    global typeParser
    with open(os.path.join(os.path.join(os.path.dirname(__file__),"..","grammar"), "type.lark")) as f:
        lark_string = f.read()
    parser = Lark(lark_string, parser="earley",start=start)
    typeParser = Lark(lark_string, parser="earley",start="type")
    return parser
def parse_and_transform(tcl_string: Union[str, bytes]):
    """Parse a tcl dump string with the module-level `parser` and transform
    the tree into the object model.

    Requires initalize_parser() to have been called first. Raises Exception
    ("Failed to parse: \\n") on parse failure, chaining the original error.
    """
    # Idiom fix: isinstance instead of type(...) == bytes.
    if isinstance(tcl_string, bytes):
        tcl_string = tcl_string.decode("utf-8")
    global parser
    try:
        parsed = parser.parse(tcl_string)
    except Exception as e:
        # BUG FIX: both branches of the old `if` raised the identical
        # exception, so the condition was dead code; keep the message and
        # chain the original cause for debuggability.
        raise Exception("Failed to parse: \n") from e
    result = ModuleTransformer().transform(parsed)
    return result
def evaluateType(type_string: Union[str, bytes]):
    """Parse a single type expression with the module-level `typeParser`
    (start rule "type") and transform it into the object model."""
    # Idiom fix: isinstance instead of type(...) == bytes.
    if isinstance(type_string, bytes):
        type_string = type_string.decode("utf-8")
    global typeParser
    parsed = typeParser.parse(type_string)
    result = ModuleTransformer().transform(parsed)
    return result
def evaluateCustomStart(type_string: Union[str, bytes], start: str):
    """Parse `type_string` with a freshly built parser using the given start
    rule and transform the result.

    NOTE: rereads and rebuilds the grammar on every call — fine for
    occasional use, slow in a loop.
    """
    # Idiom fix: isinstance instead of type(...) == bytes.
    if isinstance(type_string, bytes):
        type_string = type_string.decode("utf-8")
    with open(os.path.join(os.path.join(os.path.dirname(__file__),"..","grammar"), "type.lark")) as f:
        lark_string = f.read()
    customParser = Lark(lark_string, parser="earley",start=start)
    parsed = customParser.parse(type_string,start=start)
    result = ModuleTransformer().transform(parsed)
    return result
# Manual smoke test: parse the bundled sample dump and print the result.
if __name__ == "__main__":
    parser = initalize_parser(start="tcl_type_full")
    result = parse_and_transform(example_text)
    print(result)
| 29.775213 | 147 | 0.583636 |
362673a71b7589832958f5131bc6272eb5d6f210 | 17,662 | py | Python | tests/test_middleware.py | st3ldz/django-cors-headers | 57a5bdfef2223a2f6f973bcbae261c3055136707 | [
"MIT"
] | null | null | null | tests/test_middleware.py | st3ldz/django-cors-headers | 57a5bdfef2223a2f6f973bcbae261c3055136707 | [
"MIT"
] | null | null | null | tests/test_middleware.py | st3ldz/django-cors-headers | 57a5bdfef2223a2f6f973bcbae261c3055136707 | [
"MIT"
] | null | null | null | from django.http import HttpResponse
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.deprecation import MiddlewareMixin
from corsheaders.middleware import (
ACCESS_CONTROL_ALLOW_CREDENTIALS,
ACCESS_CONTROL_ALLOW_HEADERS,
ACCESS_CONTROL_ALLOW_METHODS,
ACCESS_CONTROL_ALLOW_ORIGIN,
ACCESS_CONTROL_EXPOSE_HEADERS,
ACCESS_CONTROL_MAX_AGE,
)
from tests.utils import (
append_middleware,
prepend_middleware,
temporary_check_request_hander,
)
class ShortCircuitMiddleware(MiddlewareMixin):
    """Middleware that answers every request itself, bypassing the view.

    Used by the tests below to verify how CorsMiddleware behaves when an
    earlier middleware returns a response during process_request.
    """

    def process_request(self, request):
        # Returning a response here stops the middleware chain immediately.
        response = HttpResponse("short-circuit-middleware-response")
        return response
class CorsMiddlewareTests(TestCase):
    """Integration tests for corsheaders' CorsMiddleware.

    Each test issues a request through the Django test client against the
    test project's URLconf and asserts on the CORS response headers
    (the ACCESS_CONTROL_* constants imported above).  Settings are varied
    per-test with @override_settings.
    """
    def test_get_no_origin(self):
        resp = self.client.get("/")
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    def test_get_origin_vary_by_default(self):
        resp = self.client.get("/")
        assert resp["Vary"] == "Origin"
    def test_get_invalid_origin(self):
        # A syntactically invalid Origin header (stray "]") must not be echoed.
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com]")
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    @override_settings(CORS_ALLOWED_ORIGINS=["http://example.com"])
    def test_get_not_in_allowed_origins(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.org")
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    @override_settings(CORS_ALLOWED_ORIGINS=["https://example.org"])
    def test_get_not_in_allowed_origins_due_to_wrong_scheme(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.org")
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    @override_settings(
        CORS_ALLOWED_ORIGINS=["http://example.com", "http://example.org"]
    )
    def test_get_in_allowed_origins(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.org")
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "http://example.org"
    @override_settings(CORS_ALLOWED_ORIGINS=["http://example.com", "null"])
    def test_null_in_allowed_origins(self):
        # "null" is sent e.g. by sandboxed iframes and some redirects.
        resp = self.client.get("/", HTTP_ORIGIN="null")
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "null"
    @override_settings(CORS_ALLOWED_ORIGINS=["http://example.com", "file://"])
    def test_file_in_allowed_origins(self):
        """
        'file://' should be allowed as an origin since Chrome on Android
        mistakenly sends it
        """
        resp = self.client.get("/", HTTP_ORIGIN="file://")
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "file://"
    @override_settings(
        CORS_ALLOW_ALL_ORIGINS=True,
        CORS_EXPOSE_HEADERS=["accept", "origin", "content-type"],
    )
    def test_get_expose_headers(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com")
        assert resp[ACCESS_CONTROL_EXPOSE_HEADERS] == "accept, origin, content-type"
    @override_settings(CORS_ALLOW_ALL_ORIGINS=True)
    def test_get_dont_expose_headers(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com")
        assert ACCESS_CONTROL_EXPOSE_HEADERS not in resp
    @override_settings(CORS_ALLOW_CREDENTIALS=True, CORS_ALLOW_ALL_ORIGINS=True)
    def test_get_allow_credentials(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com")
        assert resp[ACCESS_CONTROL_ALLOW_CREDENTIALS] == "true"
    @override_settings(CORS_ALLOW_ALL_ORIGINS=True)
    def test_get_dont_allow_credentials(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com")
        assert ACCESS_CONTROL_ALLOW_CREDENTIALS not in resp
    @override_settings(
        CORS_ALLOW_HEADERS=["content-type", "origin"],
        CORS_ALLOW_METHODS=["GET", "OPTIONS"],
        CORS_PREFLIGHT_MAX_AGE=1002,
        CORS_ALLOW_ALL_ORIGINS=True,
    )
    def test_options_allowed_origin(self):
        # Preflight responses advertise the configured headers/methods/max-age.
        resp = self.client.options("/", HTTP_ORIGIN="http://example.com")
        assert resp[ACCESS_CONTROL_ALLOW_HEADERS] == "content-type, origin"
        assert resp[ACCESS_CONTROL_ALLOW_METHODS] == "GET, OPTIONS"
        assert resp[ACCESS_CONTROL_MAX_AGE] == "1002"
    @override_settings(
        CORS_ALLOW_HEADERS=["content-type", "origin"],
        CORS_ALLOW_METHODS=["GET", "OPTIONS"],
        CORS_PREFLIGHT_MAX_AGE=0,
        CORS_ALLOW_ALL_ORIGINS=True,
    )
    def test_options_no_max_age(self):
        # Max-age of 0 means the header is omitted entirely.
        resp = self.client.options("/", HTTP_ORIGIN="http://example.com")
        assert resp[ACCESS_CONTROL_ALLOW_HEADERS] == "content-type, origin"
        assert resp[ACCESS_CONTROL_ALLOW_METHODS] == "GET, OPTIONS"
        assert ACCESS_CONTROL_MAX_AGE not in resp
    @override_settings(
        CORS_ALLOWED_ORIGINS=["http://localhost:9000"],
    )
    def test_options_allowed_origins_with_port(self):
        resp = self.client.options("/", HTTP_ORIGIN="http://localhost:9000")
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "http://localhost:9000"
    @override_settings(
        CORS_ALLOWED_ORIGIN_REGEXES=[r"^https://\w+\.example\.com$"],
    )
    def test_options_adds_origin_when_domain_found_in_allowed_regexes(self):
        resp = self.client.options("/", HTTP_ORIGIN="https://foo.example.com")
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "https://foo.example.com"
    @override_settings(
        CORS_ALLOWED_ORIGIN_REGEXES=[
            r"^https://\w+\.example\.org$",
            r"^https://\w+\.example\.com$",
        ],
    )
    def test_options_adds_origin_when_domain_found_in_allowed_regexes_second(self):
        resp = self.client.options("/", HTTP_ORIGIN="https://foo.example.com")
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "https://foo.example.com"
    @override_settings(
        CORS_ALLOWED_ORIGIN_REGEXES=[r"^https://\w+\.example\.org$"],
    )
    def test_options_doesnt_add_origin_when_domain_not_found_in_allowed_regexes(
        self,
    ):
        resp = self.client.options("/", HTTP_ORIGIN="https://foo.example.com")
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    def test_options(self):
        resp = self.client.options("/", HTTP_ACCESS_CONTROL_REQUEST_METHOD="value")
        assert resp.status_code == 200
    def test_options_empty_request_method(self):
        resp = self.client.options("/", HTTP_ACCESS_CONTROL_REQUEST_METHOD="")
        assert resp.status_code == 200
    def test_options_no_header(self):
        # Without Access-Control-Request-Method this is not a preflight, so
        # the URLconf's normal 404 is returned.
        resp = self.client.options("/")
        assert resp.status_code == 404
    @override_settings(CORS_ALLOW_CREDENTIALS=True, CORS_ALLOW_ALL_ORIGINS=True)
    def test_allow_all_origins_get(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com")
        assert resp.status_code == 200
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "http://example.com"
        assert resp["Vary"] == "Origin"
    @override_settings(CORS_ALLOW_CREDENTIALS=True, CORS_ALLOW_ALL_ORIGINS=True)
    def test_allow_all_origins_options(self):
        resp = self.client.options(
            "/",
            HTTP_ORIGIN="http://example.com",
            HTTP_ACCESS_CONTROL_REQUEST_METHOD="value",
        )
        assert resp.status_code == 200
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "http://example.com"
        assert resp["Vary"] == "Origin"
    @override_settings(CORS_ALLOW_CREDENTIALS=True, CORS_ALLOW_ALL_ORIGINS=True)
    def test_non_200_headers_still_set(self):
        """
        It's not clear whether the header should still be set for non-HTTP200
        when not a preflight request. However this is the existing behaviour for
        django-cors-middleware, so at least this test makes that explicit, especially
        since for the switch to Django 1.10, special-handling will need to be put in
        place to preserve this behaviour. See `ExceptionMiddleware` mention here:
        https://docs.djangoproject.com/en/3.0/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware # noqa: E501
        """
        resp = self.client.get("/test-401/", HTTP_ORIGIN="http://example.com")
        assert resp.status_code == 401
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "http://example.com"
    @override_settings(CORS_ALLOW_CREDENTIALS=True, CORS_ALLOW_ALL_ORIGINS=True)
    def test_auth_view_options(self):
        """
        Ensure HTTP200 and header still set, for preflight requests to views requiring
        authentication. See: https://github.com/adamchainz/django-cors-headers/issues/3
        """
        resp = self.client.options(
            "/test-401/",
            HTTP_ORIGIN="http://example.com",
            HTTP_ACCESS_CONTROL_REQUEST_METHOD="value",
        )
        assert resp.status_code == 200
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "http://example.com"
        assert resp["Content-Length"] == "0"
    def test_signal_handler_that_returns_false(self):
        # The check_request_enabled signal can veto CORS for a request.
        def handler(*args, **kwargs):
            return False
        with temporary_check_request_hander(handler):
            resp = self.client.options(
                "/",
                HTTP_ORIGIN="http://example.com",
                HTTP_ACCESS_CONTROL_REQUEST_METHOD="value",
            )
        assert resp.status_code == 200
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    def test_signal_handler_that_returns_true(self):
        def handler(*args, **kwargs):
            return True
        with temporary_check_request_hander(handler):
            resp = self.client.options(
                "/",
                HTTP_ORIGIN="http://example.com",
                HTTP_ACCESS_CONTROL_REQUEST_METHOD="value",
            )
        assert resp.status_code == 200
        assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "http://example.com"
    @override_settings(CORS_ALLOWED_ORIGINS=["http://example.com"])
    def test_signal_handler_allow_some_urls_to_everyone(self):
        def allow_api_to_all(sender, request, **kwargs):
            return request.path.startswith("/api/")
        with temporary_check_request_hander(allow_api_to_all):
            resp = self.client.options(
                "/",
                HTTP_ORIGIN="http://example.org",
                HTTP_ACCESS_CONTROL_REQUEST_METHOD="value",
            )
            assert resp.status_code == 200
            assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
            resp = self.client.options(
                "/api/something/",
                HTTP_ORIGIN="http://example.org",
                HTTP_ACCESS_CONTROL_REQUEST_METHOD="value",
            )
            assert resp.status_code == 200
            assert resp[ACCESS_CONTROL_ALLOW_ORIGIN] == "http://example.org"
    @override_settings(CORS_ALLOWED_ORIGINS=["http://example.com"])
    def test_signal_called_once_during_normal_flow(self):
        calls = 0
        def allow_all(sender, request, **kwargs):
            nonlocal calls
            calls += 1
            return True
        with temporary_check_request_hander(allow_all):
            self.client.get("/", HTTP_ORIGIN="http://example.org")
        assert calls == 1
    @override_settings(CORS_ALLOWED_ORIGINS=["http://example.com"])
    @prepend_middleware("tests.test_middleware.ShortCircuitMiddleware")
    def test_get_short_circuit(self):
        """
        Test a scenario when a middleware that returns a response is run before
        the ``CorsMiddleware``. In this case
        ``CorsMiddleware.process_response()`` should ignore the request if
        MIDDLEWARE setting is used (new mechanism in Django 1.10+).
        """
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com")
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    @override_settings(
        CORS_ALLOWED_ORIGINS=["http://example.com"], CORS_URLS_REGEX=r"^/foo/$"
    )
    @prepend_middleware(__name__ + ".ShortCircuitMiddleware")
    def test_get_short_circuit_should_be_ignored(self):
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com")
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    @override_settings(
        CORS_ALLOWED_ORIGINS=["http://example.com"], CORS_URLS_REGEX=r"^/foo/$"
    )
    def test_get_regex_matches(self):
        resp = self.client.get("/foo/", HTTP_ORIGIN="http://example.com")
        assert ACCESS_CONTROL_ALLOW_ORIGIN in resp
    @override_settings(
        CORS_ALLOWED_ORIGINS=["http://example.com"], CORS_URLS_REGEX=r"^/not-foo/$"
    )
    def test_get_regex_doesnt_match(self):
        resp = self.client.get("/foo/", HTTP_ORIGIN="http://example.com")
        assert ACCESS_CONTROL_ALLOW_ORIGIN not in resp
    @override_settings(
        CORS_ALLOWED_ORIGINS=["http://example.com"], CORS_URLS_REGEX=r"^/foo/$"
    )
    def test_get_regex_matches_path_info(self):
        # CORS_URLS_REGEX must match against PATH_INFO, ignoring SCRIPT_NAME.
        resp = self.client.get(
            "/foo/", HTTP_ORIGIN="http://example.com", SCRIPT_NAME="/prefix/"
        )
        assert ACCESS_CONTROL_ALLOW_ORIGIN in resp
    @override_settings(CORS_ALLOWED_ORIGINS=["http://example.com"])
    def test_cors_enabled_is_attached_and_bool(self):
        """
        Ensure that request._cors_enabled is available - although a private API
        someone might use it for debugging
        """
        resp = self.client.get("/", HTTP_ORIGIN="http://example.com")
        request = resp.wsgi_request
        assert isinstance(request._cors_enabled, bool)
        assert request._cors_enabled
    @override_settings(CORS_ALLOWED_ORIGINS=["http://example.com"])
    def test_works_if_view_deletes_cors_enabled(self):
        """
        Just in case something crazy happens in the view or other middleware,
        check that get_response doesn't fall over if `_cors_enabled` is removed
        """
        resp = self.client.get("/delete-is-enabled/", HTTP_ORIGIN="http://example.com")
        assert ACCESS_CONTROL_ALLOW_ORIGIN in resp
@override_settings(
    CORS_REPLACE_HTTPS_REFERER=True, CORS_ALLOWED_ORIGIN_REGEXES=[r".*example.*"]
)
class RefererReplacementCorsMiddlewareTests(TestCase):
    """Tests for the CORS_REPLACE_HTTPS_REFERER feature.

    With the setting enabled, secure cross-origin requests get their Referer
    header rewritten to the request host (so Django's CSRF referer check
    passes); the original value is preserved in ORIGINAL_HTTP_REFERER and
    restored by CorsPostCsrfMiddleware when that middleware is installed.
    """
    @override_settings(MIDDLEWARE=["corsheaders.middleware.CorsPostCsrfMiddleware"])
    def test_post_middleware_alone(self):
        # The post-CSRF middleware must be harmless without CorsMiddleware.
        resp = self.client.get("/")
        assert resp.status_code == 200
    def test_get_replaces_referer_when_secure(self):
        resp = self.client.get(
            "/",
            HTTP_FAKE_SECURE="true",
            HTTP_HOST="example.com",
            HTTP_ORIGIN="https://example.org",
            HTTP_REFERER="https://example.org/foo",
        )
        assert resp.status_code == 200
        assert resp.wsgi_request.META["HTTP_REFERER"] == "https://example.com/"
        assert (
            resp.wsgi_request.META["ORIGINAL_HTTP_REFERER"] == "https://example.org/foo"
        )
    @append_middleware("corsheaders.middleware.CorsPostCsrfMiddleware")
    def test_get_post_middleware_rereplaces_referer_when_secure(self):
        # With the post-CSRF middleware installed the original Referer is
        # restored and the stash key removed.
        resp = self.client.get(
            "/",
            HTTP_FAKE_SECURE="true",
            HTTP_HOST="example.com",
            HTTP_ORIGIN="https://example.org",
            HTTP_REFERER="https://example.org/foo",
        )
        assert resp.status_code == 200
        assert resp.wsgi_request.META["HTTP_REFERER"] == "https://example.org/foo"
        assert "ORIGINAL_HTTP_REFERER" not in resp.wsgi_request.META
    def test_get_does_not_replace_referer_when_insecure(self):
        resp = self.client.get(
            "/",
            HTTP_HOST="example.com",
            HTTP_ORIGIN="https://example.org",
            HTTP_REFERER="https://example.org/foo",
        )
        assert resp.status_code == 200
        assert resp.wsgi_request.META["HTTP_REFERER"] == "https://example.org/foo"
        assert "ORIGINAL_HTTP_REFERER" not in resp.wsgi_request.META
    @override_settings(CORS_REPLACE_HTTPS_REFERER=False)
    def test_get_does_not_replace_referer_when_disabled(self):
        resp = self.client.get(
            "/",
            HTTP_FAKE_SECURE="true",
            HTTP_HOST="example.com",
            HTTP_ORIGIN="https://example.org",
            HTTP_REFERER="https://example.org/foo",
        )
        assert resp.status_code == 200
        assert resp.wsgi_request.META["HTTP_REFERER"] == "https://example.org/foo"
        assert "ORIGINAL_HTTP_REFERER" not in resp.wsgi_request.META
    def test_get_does_not_fail_in_referer_replacement_when_referer_missing(self):
        resp = self.client.get(
            "/",
            HTTP_FAKE_SECURE="true",
            HTTP_HOST="example.com",
            HTTP_ORIGIN="https://example.org",
        )
        assert resp.status_code == 200
        assert "HTTP_REFERER" not in resp.wsgi_request.META
        assert "ORIGINAL_HTTP_REFERER" not in resp.wsgi_request.META
    def test_get_does_not_fail_in_referer_replacement_when_host_missing(self):
        resp = self.client.get(
            "/",
            HTTP_FAKE_SECURE="true",
            HTTP_ORIGIN="https://example.org",
            HTTP_REFERER="https://example.org/foo",
        )
        assert resp.status_code == 200
        assert resp.wsgi_request.META["HTTP_REFERER"] == "https://example.org/foo"
        assert "ORIGINAL_HTTP_REFERER" not in resp.wsgi_request.META
    @override_settings(CORS_ALLOWED_ORIGIN_REGEXES=[])
    def test_get_does_not_replace_referer_when_not_valid_request(self):
        # Origin not in the allowed regexes -> no CORS processing, no rewrite.
        resp = self.client.get(
            "/",
            HTTP_FAKE_SECURE="true",
            HTTP_HOST="example.com",
            HTTP_ORIGIN="https://example.org",
            HTTP_REFERER="https://example.org/foo",
        )
        assert resp.status_code == 200
        assert resp.wsgi_request.META["HTTP_REFERER"] == "https://example.org/foo"
        assert "ORIGINAL_HTTP_REFERER" not in resp.wsgi_request.META
| 41.074419 | 126 | 0.664591 |
42aa73aa8145fe4c64603fbc7c7c3ce80aca0a50 | 348 | py | Python | FiNDAPI/api/classification/operations.py | NanoNLP/FiNDAPI | 421745530c808a2253081672cc2ee64271a68cf0 | [
"MIT"
] | null | null | null | FiNDAPI/api/classification/operations.py | NanoNLP/FiNDAPI | 421745530c808a2253081672cc2ee64271a68cf0 | [
"MIT"
] | null | null | null | FiNDAPI/api/classification/operations.py | NanoNLP/FiNDAPI | 421745530c808a2253081672cc2ee64271a68cf0 | [
"MIT"
def create_classification(data):
    """
    Adds a classification to a paper in the database.
    """
    # TODO: implement persistence. Currently a stub that returns None.
def update_classification(id, data):
    """
    Updates the classifications related to a paper.
    """
    # TODO: implement. Stub returns None. (NB: `id` shadows the builtin but
    # is part of the public signature, so it is kept as-is.)
def delete_classification(id, data):
    """
    Deletes all classifications related to a paper.
    """
    # TODO: implement. Stub returns None.
4521a15b119998303645a67e6434d256285e019f | 677 | py | Python | app/core/management/commands/wait_for_db.py | drewjg11/recipe-app-api | 6bf5dc8e5876fe181d63455f3fcc14e482e98a04 | [
"MIT"
] | null | null | null | app/core/management/commands/wait_for_db.py | drewjg11/recipe-app-api | 6bf5dc8e5876fe181d63455f3fcc14e482e98a04 | [
"MIT"
] | null | null | null | app/core/management/commands/wait_for_db.py | drewjg11/recipe-app-api | 6bf5dc8e5876fe181d63455f3fcc14e482e98a04 | [
"MIT"
] | null | null | null | import time
from django.db import connections
from django.db.utils import OperationalError
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Django command to pause execution until database is available"""

    def handle(self, *args, **options):
        # Poll once per second until Django can hand out the default
        # connection without raising OperationalError.
        self.stdout.write('Waiting for database...')
        while True:
            try:
                db_conn = connections['default']
            except OperationalError:
                db_conn = None
            if db_conn:
                break
            self.stdout.write('Database unavailable, waiting 1 second...')
            time.sleep(1)
        self.stdout.write(self.style.SUCCESS('Database available!!'))
| 30.772727 | 78 | 0.646972 |
7f85cc861b08f8365c7fb7c2f03cdbbaf9da4978 | 395 | py | Python | buyandbye/wsgi.py | The-God-Fathers/BuyandBye | c473a09ff3764ae28c3f5608ab83117c7a806c01 | [
"MIT"
] | null | null | null | buyandbye/wsgi.py | The-God-Fathers/BuyandBye | c473a09ff3764ae28c3f5608ab83117c7a806c01 | [
"MIT"
] | 8 | 2021-03-19T10:14:56.000Z | 2022-03-12T00:43:14.000Z | buyandbye/wsgi.py | The-God-Fathers/BuyandBye | c473a09ff3764ae28c3f5608ab83117c7a806c01 | [
"MIT"
] | null | null | null | """
WSGI config for buyandbye project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless DJANGO_SETTINGS_MODULE is
# already set in the environment.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'buyandbye.settings')
# Module-level WSGI callable picked up by WSGI servers (gunicorn, mod_wsgi...).
application = get_wsgi_application()
| 23.235294 | 78 | 0.787342 |
87049d43a7788fa56c8ae642c4a4c4910d29121b | 1,437 | py | Python | setup.py | nurupo/mingw-ldd | 67eff2bf8a48b070539f7a6a1855dac0a3174189 | [
"MIT"
] | 11 | 2020-05-31T12:55:48.000Z | 2022-03-03T08:37:39.000Z | setup.py | nurupo/mingw-ldd | 67eff2bf8a48b070539f7a6a1855dac0a3174189 | [
"MIT"
] | 1 | 2021-11-26T18:40:25.000Z | 2021-12-29T03:13:15.000Z | setup.py | nurupo/mingw-ldd | 67eff2bf8a48b070539f7a6a1855dac0a3174189 | [
"MIT"
] | 1 | 2020-05-31T05:05:32.000Z | 2020-05-31T05:05:32.000Z | import os
from setuptools import setup
# Package metadata (__title__, __version__, ...) lives in
# mingw_ldd/__version__.py; execute that file into `about` so setup() can use
# it without importing the (possibly not-yet-installable) package.
about = {}
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'mingw_ldd', '__version__.py'), 'r', encoding='utf-8') as f:
    exec(f.read(), about)
setup(
    name=about['__title__'],
    version=about['__version__'],
    description=about['__description__'],
    long_description=open('README.md').read(),
    long_description_content_type='text/markdown',
    author=about['__author__'],
    author_email=about['__author_email__'],
    url=about['__url__'],
    license=about['__license__'],
    packages=[about['__title__']],
    python_requires='>=3.5',
    install_requires=['pefile'],
    entry_points={
        # Console script name is the package name with dashes (mingw-ldd).
        'console_scripts': ['{}={}.__main__:main'.format(about['__title__'].replace('_', '-'), about['__title__'])],
    },
    include_package_data=True,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development',
        'Environment :: Console',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython'
    ]
)
| 34.214286 | 116 | 0.627001 |
72b3bb0c3f698f10a044d9116145b039809ae3a1 | 1,899 | py | Python | commands/restrictions.py | nstra111/autovc | e73e1fea7b566721c3dce3ca6f587472e7ee9d1b | [
"MIT"
] | 177 | 2020-02-02T18:03:46.000Z | 2022-03-17T06:18:43.000Z | commands/restrictions.py | zigsphere/Auto-Voice-Channels | 6ae901728580bef4246737a6f1b9f10763badd3e | [
"MIT"
] | 82 | 2020-02-02T17:43:18.000Z | 2022-03-24T20:34:55.000Z | commands/restrictions.py | zigsphere/Auto-Voice-Channels | 6ae901728580bef4246737a6f1b9f10763badd3e | [
"MIT"
] | 165 | 2019-02-17T20:15:20.000Z | 2022-03-27T23:59:23.000Z | from commands.base import Cmd
# Help entries rendered by the bot's help command; <PREFIX> and <COMMAND> are
# placeholders substituted at display time.
help_text = [
    [
        ("Usage:",
         "<PREFIX><COMMAND>\n"
         "<PREFIX><COMMAND> `COMMAND`"),
        ("Description:",
         "Show any role retrictions set for all or a particular command using `<PREFIX>restrict`."),
        # NOTE(review): "retrictions" typo above is user-visible runtime text;
        # left unchanged here since this is a documentation-only edit.
        ("Examples:",
         "`<PREFIX><COMMAND>`\n"
         "`<PREFIX><COMMAND> name`\n"
         "`<PREFIX><COMMAND> lock`"),
    ]
]
async def execute(ctx, params):
    """Report the role restrictions configured via the `restrict` command.

    With no arguments, lists restrictions for every command; with a command
    name, lists restrictions for that command only.  Returns the framework's
    usual (success, message) tuple.
    """
    cmd = ' '.join(params).strip()
    guild = ctx['guild']
    settings = ctx['settings']
    # No restriction table at all -> nothing to report.
    if 'restrictions' not in settings or not settings['restrictions']:
        return True, "There are currently no restrictions for any commands."
    if cmd:
        from commands import commands
        if cmd not in commands:
            # Unknown command: either explain, or stay silent when the guild
            # enabled "don't show command not found" (dcnf).
            if 'dcnf' not in ctx['settings'] or ctx['settings']['dcnf'] is False:
                return False, ("`{}` is not a recognized command. Run '**{}help**' "
                               "to get a list of commands".format(cmd, ctx['print_prefix']))
            else:
                return False, "NO RESPONSE"
    # Keep only the entries relevant to the requested command (or all of them).
    restrictions = {name: role_ids for name, role_ids in settings['restrictions'].items()
                    if not cmd or name == cmd}
    if not restrictions:
        if not cmd:
            return True, "There are currently no restrictions for any commands."
        return True, "There are currently no restrictions for the `{}` command.".format(cmd)
    reply = "**Restrictions:**"
    for name, role_ids in restrictions.items():
        labels = []
        for role_id in role_ids:
            role = guild.get_role(role_id)
            labels.append("{} (`{}`)".format(role.name, role.id) if role else "⚠ REMOVED ROLE")
        reply += "\n`{}`: {}".format(name, ', '.join(labels))
    return True, reply
# Command registration consumed by the bot's dispatcher: no required
# parameters, and restricted to gold-tier servers and admins.
command = Cmd(
    execute=execute,
    help_text=help_text,
    params_required=0,
    gold_required=True,
    admin_required=True,
)
| 30.629032 | 100 | 0.558715 |
72678a16069d8c129e6ba9e703f65e4e95eb4c4e | 2,407 | py | Python | energyPATHWAYS/tests/test_util.py | jdailey/EnergyPATHWAYS | 0fb0ead475b6395f6b07fc43fe6c85826ee47d0f | [
"MIT"
] | 26 | 2017-02-06T22:57:29.000Z | 2022-03-25T20:02:32.000Z | energyPATHWAYS/tests/test_util.py | jdailey/EnergyPATHWAYS | 0fb0ead475b6395f6b07fc43fe6c85826ee47d0f | [
"MIT"
] | 65 | 2016-01-22T01:33:05.000Z | 2016-09-03T14:46:08.000Z | energyPATHWAYS/tests/test_util.py | jdailey/EnergyPATHWAYS | 0fb0ead475b6395f6b07fc43fe6c85826ee47d0f | [
"MIT"
] | 11 | 2017-02-24T23:27:36.000Z | 2021-10-18T17:33:37.000Z | # -*- coding: utf-8 -*-
__author__ = 'Michael'
import unittest
import mock
import energyPATHWAYS
from energyPATHWAYS.util import *
def read_table(table_name, column_names='*', return_unique=False, return_iterable=False, **filters):
    """Canned stand-in for energyPATHWAYS' csv_read_table used by the mock.

    Returns fixed rows for the small set of (table, columns) queries that
    id_to_name() is expected to issue, and raises ValueError for anything
    else so an unexpected query fails loudly.
    """
    canned = {
        ('IDMap', 'identifier_id, ref_table'):
            [(u'supply_type_id', u'SupplyTypes'), (u'ghg_id', u'GreenhouseGases')],
        ('SupplyTypes', 'id, name'):
            [(1, u'Blend'), (2, u'Conversion'), (3, u'Delivery'), (4, u'Import'),
             (5, u'Primary'), (6, u'Storage')],
        ('GreenhouseGases', 'id, name'):
            [(1, u'CO2'), (2, u'CH4'), (3, u'N2O')],
    }
    key = (table_name, column_names)
    if key in canned:
        return canned[key]
    # if we've gotten this far without returning, something is amiss
    raise ValueError("Mock doesn't know how to provide this table read: " +
                     str(table_name) + ", " + str(column_names) + ", " + str(filters))
# Autospec'd replacement for energyPATHWAYS.util.csv_read_table (brought in by
# the star-import above); every call is delegated to the read_table stub.
mock_sql_read_table = mock.create_autospec(csv_read_table, side_effect=read_table)
@mock.patch('energyPATHWAYS.util.csv_read_table', mock_sql_read_table)
class TestIdToName(unittest.TestCase):
    """Tests for util.id_to_name with database reads replaced by read_table."""
    def test_basic_lookup(self):
        self.assertEqual(id_to_name('supply_type_id', 1), 'Blend')
        self.assertEqual(id_to_name('ghg_id', 2), 'CH4')
    def test_lookup_none_att(self):
        # A None attribute id should pass through as None, not error.
        self.assertIsNone(id_to_name('supply_type_id', None))
    def test_tuple_lookup(self):
        # 'tuple' mode returns (identifier-without-_id-suffix, name).
        self.assertEqual(id_to_name('supply_type_id', 1, 'tuple'), ('supply_type', 'Blend'))
    def test_lookup_unknown_table(self):
        with self.assertRaises(KeyError):
            id_to_name('GARBAGE', 1)
    def test_lookup_att_out_of_range(self):
        self.assertIsNone(id_to_name('supply_type_id', -1))
    def test_caching(self):
        # Check to make sure the lookup cache is where we expect, then clear it
        self.assertIsNotNone(energyPATHWAYS.util.id_to_name.lookup_dict)
        energyPATHWAYS.util.id_to_name.lookup_dict = {}
        id_to_name('supply_type_id', 1)
        self.assertTrue(mock_sql_read_table.called, "Database not accessed on first call to id_to_name()")
        mock_sql_read_table.reset_mock()
        id_to_name('ghg_id', 2)
        # the second time everything needed should be cached so there should be no more db calls
        self.assertFalse(mock_sql_read_table.called, "Redundant database access by id_to_name()")
| 42.982143 | 118 | 0.68924 |
9c4657a8e97af86c06b64e77bc178212bb2d5c77 | 1,798 | py | Python | Tree/lca_of_binary_tree.py | lakshyarawal/pythonPractice | 4b400342198a8270c5ac0c6306afb555f927c6c1 | [
"MIT"
] | null | null | null | Tree/lca_of_binary_tree.py | lakshyarawal/pythonPractice | 4b400342198a8270c5ac0c6306afb555f927c6c1 | [
"MIT"
] | null | null | null | Tree/lca_of_binary_tree.py | lakshyarawal/pythonPractice | 4b400342198a8270c5ac0c6306afb555f927c6c1 | [
"MIT"
] | null | null | null | """ LCA: Lowest Common Ancestor of two values in a binary tree """
from binary_tree_traversal_and_height import Node
from collections import deque
def find_path(root_n, path_a, n):
    """Record the root-to-node path for value ``n`` into ``path_a``.

    Depth-first search that appends each visited value to ``path_a`` and
    unwinds it on dead ends.  Returns True iff ``n`` exists in the tree;
    on success ``path_a`` holds the values from the root down to ``n``.
    """
    if root_n is None:
        return False
    path_a.append(root_n.value)
    if root_n.value == n:
        return True
    if find_path(root_n.left_child, path_a, n) or find_path(root_n.right_child, path_a, n):
        return True
    # n is not in this subtree: drop this node from the candidate path.
    path_a.pop()
    return False


def lca_bin(root_n, n1, n2):
    """Lowest common ancestor of values ``n1`` and ``n2`` via two path walks.

    Builds the root-to-node path for each value with find_path(), then walks
    the two paths in lockstep; the last shared value is the LCA.

    :return: the LCA's value, or False when either value is absent
        (preserving the original contract).
    """
    if root_n is None:
        return False
    path_1 = []
    path_2 = []
    if find_path(root_n, path_1, n1) is False or find_path(root_n, path_2, n2) is False:
        return False
    # Advance past the shared prefix of the two paths.  The original loop
    # compared path[i + 1] without a bounds check, which raised IndexError
    # whenever one node was an ancestor of the other (equal-length prefix).
    i = 0
    limit = min(len(path_1), len(path_2))
    while i < limit and path_1[i] == path_2[i]:
        i += 1
    # Both paths start at the root, so at least one element matched.
    return path_1[i - 1]
""" The above approach does 4 traversal of the tree and we can improve on this"""
def lca_eff(root_n, n1, n2):
    """Lowest common ancestor of ``n1`` and ``n2`` in a single traversal.

    Returns the LCA's value, the matching value itself when one target is an
    ancestor of the other, or None when neither value occurs in the tree.
    """
    if root_n is None:
        return None
    # A node matching either target is the answer for its own subtree.
    if root_n.value in (n1, n2):
        return root_n.value
    left_hit = lca_eff(root_n.left_child, n1, n2)
    right_hit = lca_eff(root_n.right_child, n1, n2)
    # Targets found on both sides: this node is where the paths split.
    if left_hit is not None and right_hit is not None:
        return root_n.value
    return left_hit if left_hit is not None else right_hit
def main():
    """Demo: build a sample tree and print the LCA of 60 and 70 computed by
    both implementations (path-based and single-traversal)."""
    node_40 = Node(40)
    node_40.left_child = Node(60)
    node_50 = Node(50)
    node_50.left_child = Node(70)
    node_50.right_child = Node(80)
    node_30 = Node(30)
    node_30.left_child = node_40
    node_30.right_child = node_50
    root = Node(10)
    root.left_child = Node(20)
    root.right_child = node_30
    print(lca_bin(root, 60, 70))
    print(lca_eff(root, 60, 70))
# Allow running this module directly as a demo script.
if __name__ == "__main__":
    main()
| 26.441176 | 91 | 0.644605 |
b9980b32a1241f7718ed2032c03470ff8505b849 | 8,092 | py | Python | ks_gini_cap30_AUC_multilabel.py | gaho8435/ks_gini_cap30_AUC | e04f3813869cb2183c4772190918e69a4f94492c | [
"MIT"
] | null | null | null | ks_gini_cap30_AUC_multilabel.py | gaho8435/ks_gini_cap30_AUC | e04f3813869cb2183c4772190918e69a4f94492c | [
"MIT"
] | null | null | null | ks_gini_cap30_AUC_multilabel.py | gaho8435/ks_gini_cap30_AUC | e04f3813869cb2183c4772190918e69a4f94492c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import time
import copy
from keras.utils import to_categorical
from sklearn import ensemble, preprocessing, metrics, model_selection
from sklearn.metrics import roc_auc_score
# In[2]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import time
import copy
from keras.utils import to_categorical
from sklearn import ensemble, preprocessing, metrics, model_selection
from sklearn.metrics import roc_auc_score
# In[2]:
class ks_gini_cap30_AUC():
    """Per-class evaluation metrics (KS, Gini, capture@30%, AUC) for a
    multi-label/multi-class classifier.

    ``model_predict_proba`` appears to be indexable per class, with each
    entry an (n_samples, 2) probability array (column 1 = positive class) —
    TODO confirm against the callers.  ``y`` is the label matrix that is
    one-hot encoded on demand (``do_cate``).
    """
    def __init__(self, classes, model_predict_proba, y, do_cate=True):
        tStart = time.time()
        self.classes = classes  # number of classes, including class 0
        self.model_predict_proba = np.array(model_predict_proba)  # predicted probabilities
        self.y = np.array(y)  # ground-truth labels
        # ---------------------- compute ROC curves & AUC ----------------------
        fpr = dict()
        tpr = dict()
        roc_auc = dict()
        if do_cate == True:
            train_y = to_categorical(self.y)
        else:
            train_y = self.y
        # One ROC curve per class, keyed by the class index.
        for i in range(self.classes):
            fpr[i],tpr[i],_ = metrics.roc_curve(train_y[:,i][:,0],self.model_predict_proba[i][:,0])
            roc_auc[i] = metrics.auc(fpr[i],tpr[i])
        #micro-average ROC
        fpr['micro'],tpr['micro'],_ = metrics.roc_curve(train_y.ravel(),self.model_predict_proba.ravel())
        roc_auc['micro'] = metrics.auc(fpr['micro'],tpr['micro'])
        #macro-average ROC
        all_fpr = np.unique(np.concatenate([fpr[i] for i in range(self.classes)]))
        mean_tpr = np.zeros_like(all_fpr)
        for i in range(self.classes):
            mean_tpr += np.interp(all_fpr,fpr[i],tpr[i])
        mean_tpr /= self.classes
        fpr['macro'] = all_fpr
        tpr['macro'] = mean_tpr
        roc_auc['macro'] = metrics.auc(fpr['macro'],tpr['macro'])
        # ----------------------------------------------------------------------
        self.fpr = fpr #false positive rate
        self.tpr = tpr #true positive rate
        self.roc_auc = roc_auc # AUC
        tEnd = time.time()
        print('Complete! Cost ' + str(round(tEnd - tStart,2)) + 's')
    def calculate_auc(self, num = 1):
        """Return the AUC for class ``num`` computed in __init__."""
        # NOTE(review): per-class keys are 0..classes-1, so the bound looks
        # off by one — num == self.classes would raise KeyError, and the
        # else-branch leaves `auc` unbound (UnboundLocalError on return).
        if num <= self.classes:  # pick the per-class AUC
            auc = self.roc_auc[num]
        else:
            print('num is wrong!')
        return auc
    def calculate_cap30(self, num = 1):
        """Capture rate in the top 30%: share of all positives of class
        ``num`` found among the 30% highest-scored samples."""
        output = []
        # NOTE(review): `output` is unused in this method.
        predict = copy.deepcopy(self.model_predict_proba)
        columns_prob = 'prob_' + str(num)
        # Align true labels with the positive-class probability by row index.
        df = pd.merge(pd.DataFrame(self.y[:,num],columns = ['y']),
                      pd.DataFrame(predict[num][:,1],columns = [columns_prob]),
                      left_index = True, right_index = True)
        df_ = df.sort_values(by = columns_prob,ascending = False)
        cap30 = sum(df_['y'][:int(len(df_['y'])*3/10)])/sum(df['y'])
        return cap30
    def calculate_ks(self, num = 1):
        """Kolmogorov–Smirnov statistic over population deciles for class
        ``num``: max |cumulative population share − cumulative positive share|."""
        output = []
        predict = copy.deepcopy(self.model_predict_proba)
        columns_prob = 'prob_' + str(num)
        df = pd.merge(pd.DataFrame(self.y[:,num],columns = ['y']),
                      pd.DataFrame(predict[num][:,1],columns = [columns_prob]),
                      left_index = True, right_index = True)
        output = []
        for i in range(1,11):  # sort by score and evaluate each decile cut
            output.append(abs(round(len(df)*i/10)/len(df) -
                              sum(df.sort_values(by = columns_prob,ascending=False)[:round(len(df)*i/10)]['y'])/sum(df['y'])))
        ks = max(output)
        return ks
    def calculate_gini(self, num = 1):
        """Decile-based Gini coefficient for class ``num`` (1 minus the
        trapezoidal approximation of the captured-positives curve)."""
        output = []
        # NOTE(review): `output` is unused in this method.
        predict = copy.deepcopy(self.model_predict_proba)
        columns_prob = 'prob_' + str(num)
        df = pd.merge(pd.DataFrame(self.y[:,num],columns = ['y']),
                      pd.DataFrame(predict[num][:,1],columns = [columns_prob]),
                      left_index = True, right_index = True)
        gini_list = []
        for i in range(1,11):  # accumulate the per-decile Gini terms
            if i == 1:
                gini_list.append((sum(df.sort_values(by = columns_prob,ascending=False)[:round(len(df)/10)]['y'])/ \
                                  sum(df['y']))*0.1)
            else:
                gini_list.append((sum(df.sort_values(by = columns_prob,ascending=False)[round(len(df)*(i-1)/10):round(len(df)*i/10)]['y'])/ \
                                  sum(df['y']))*(2*i-1)/10)
        gini = 1-sum(gini_list)
        return gini
    def calculate_all(self, num = 1):
        """Return a one-row DataFrame with KS, Gini, cap30 and AUC for
        class ``num`` (indexed by the class number)."""
        result = []
        result.append([self.calculate_ks(num = num),self.calculate_gini(num = num),
                       self.calculate_cap30(num = num),self.calculate_auc(num = num)])
        df = pd.DataFrame(result, columns = ['ks','gini','cap30','auc'],index=[[num]])
        return df
    # Sort class `num` predictions by probability, split into ten deciles and
    # report the detail (counts, positives, rates, KS, Gini) for each decile.
    def calculate_detail(self, num = 1):
        """Per-decile breakdown table for class ``num``.

        Columns (kept in the original language, as downstream code may rely
        on them): rank, count, cumulative count, positives, cumulative
        positives, positive rate, KS and Gini.
        """
        output = []
        predict = copy.deepcopy(self.model_predict_proba)
        columns_prob = 'prob_' + str(num)
        df = pd.merge(pd.DataFrame(self.y[:,num],columns = ['y']),
                      pd.DataFrame(predict[num][:,1],columns = [columns_prob]),
                      left_index = True, right_index = True)
        for i in range(1,11):  # sort by score and compute per-decile stats
            df_ = df.sort_values(by = columns_prob,ascending = False)[round(np.shape(df)[0]*(i-1)/10):round(np.shape(df)[0]*i/10)]
            output.append([i, #rank
                           round(np.shape(df)[0]*i/10)-round(np.shape(df)[0]*(i-1)/10),  # count in decile
                           round(np.shape(df)[0]*i/10),  # cumulative count
                           sum(df_['y']),  # positives in decile
                           sum(df.sort_values(by = columns_prob,ascending=False)[:round(np.shape(df)[0]*i/10)]['y']),  # cumulative positives
                           sum(df_['y'])/(round(np.shape(df)[0]*i/10)-round(np.shape(df)[0]*(i-1)/10)),  # positive rate
                           abs(round(np.shape(df)[0]*i/10)/np.shape(df)[0] - sum(df.sort_values(by = columns_prob,ascending=False)[:round(np.shape(df)[0]*i/10)]['y'])/sum(df['y'])),
                           0.])  # placeholder, Gini filled in below
        df_output = pd.DataFrame(output,columns = ['rank','人數','累積人數','y','累積y','y率','KS','Gini'])
        gini_list = []
        for i in range(1,11):  # accumulate the per-decile Gini terms
            if i == 1:
                gini_list.append((sum(df.sort_values(by = columns_prob,ascending=False)[:round(len(df)/10)]['y'])/ \
                                  sum(df['y']))*0.1)
            else:
                gini_list.append((sum(df.sort_values(by = columns_prob,ascending=False)[round(len(df)*(i-1)/10):round(len(df)*i/10)]['y'])/ \
                                  sum(df['y']))*(2*i-1)/10)
        df_output['Gini'] = gini_list
        return df_output
    # Plot the ROC curve (with AUC in the legend) for every class plus the
    # macro average.
    def ROC_AUC_plot(self, Title = '',figsize = (10,8), fontsize = 12, save = None):
        """Draw all per-class ROC curves plus the macro average.

        :param save: optional path; when given the figure is also saved.
        NOTE(review): the ``figsize`` argument is accepted but ignored —
        plt.figure is called with a hard-coded (10, 8).
        """
        plt.figure(figsize = (10,8))
        plt.style.use('seaborn')
        plt.plot(self.fpr['macro'], self.tpr['macro'],
                 label = 'macro-avg ROC curve(AUC={0:0.2f})'.format(self.roc_auc['macro']),
                 color = 'navy', linestyle = ':', linewidth = 4)
        # roc_auc holds per-class keys plus 'micro' and 'macro'; skip those two.
        for i in range(len(self.roc_auc)-2):
            plt.plot(self.fpr[i],self.tpr[i],
                     label = 'ROC curve of class{0}(AUC={1:0.2f})'.format(i, self.roc_auc[i]))
        plt.plot([0,1],[0,1],'k--')
        plt.legend(loc = 'lower right', fontsize = fontsize)
        plt.title(Title, fontsize = fontsize)
        plt.xlabel('False Positive Rate', fontsize = fontsize)
        plt.ylabel('True Positive Rate', fontsize = fontsize)
        plt.tick_params(axis = 'x', labelsize = fontsize)
        plt.tick_params(axis = 'y', labelsize = fontsize)
        plt.xlim([-0.05,1.05])
        plt.ylim([-0.05,1.05])
        if (save):
            plt.savefig(save)
        plt.show()
| 41.927461 | 181 | 0.522491 |
ac4d6219dfd4c78ef33be7127dede2b7ca38bddd | 3,159 | py | Python | app/recipe/tests/test_recipe_api.py | fdelacruz/recipe-app-api | 3e8974f571895ede0515a77435b96de8b4f0b0d1 | [
"MIT"
] | null | null | null | app/recipe/tests/test_recipe_api.py | fdelacruz/recipe-app-api | 3e8974f571895ede0515a77435b96de8b4f0b0d1 | [
"MIT"
] | null | null | null | app/recipe/tests/test_recipe_api.py | fdelacruz/recipe-app-api | 3e8974f571895ede0515a77435b96de8b4f0b0d1 | [
"MIT"
] | null | null | null | from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Recipe, Tag, Ingredient
from recipe.serializers import RecipeSerializer, RecipeDetailSerializer
RECIPES_URL = reverse('recipe:recipe-list')
def sample_ingredient(user, name='Cinnamon'):
    """Create and return a sample ingredient"""
    # Bug fix: create via the model manager (`objects`), consistent with
    # sample_tag/sample_recipe; `Ingredient.create` does not exist on the
    # model class and would raise AttributeError.
    return Ingredient.objects.create(user=user, name=name)
def detail_url(recipe_id):
    """Build the detail URL for the recipe with the given id."""
    return reverse('recipe:recipe-detail', args=(recipe_id,))
def sample_tag(user, name='Main course'):
    """Create and return a sample tag owned by *user*."""
    tag = Tag.objects.create(user=user, name=name)
    return tag
def sample_recipe(user, **params):
    """Create and return a sample recipe; keyword args override the defaults."""
    base = {
        'title': 'Sample recipe',
        'time_minutes': 10,
        'price': 5.00,
    }
    # Caller-supplied params win over the defaults.
    return Recipe.objects.create(user=user, **{**base, **params})
class PublicRecipeApiTests(TestCase):
    """Test unauthenticated recipe API access"""

    def setUp(self):
        # Fresh, unauthenticated API client for every test.
        self.client = APIClient()

    def test_auth_required(self):
        """Test that authentication is required"""
        res = self.client.get(RECIPES_URL)

        # Anonymous requests to the recipe list must be rejected with 401.
        self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateRecipeApiTests(TestCase):
    """Test authenticated recipe API access"""

    def setUp(self):
        # Authenticated client bound to a fresh test user.
        self.client = APIClient()
        self.user = get_user_model().objects.create_user(
            'test@me.com',
            'testpass'
        )
        self.client.force_authenticate(self.user)

    def test_retrieve_recipes(self):
        """Test retrieving a list of recipes"""
        sample_recipe(user=self.user)
        sample_recipe(user=self.user)

        res = self.client.get(RECIPES_URL)

        # The API is expected to return recipes newest-first.
        recipes = Recipe.objects.all().order_by('-id')
        serializer = RecipeSerializer(recipes, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data, serializer.data)

    def test_recipes_limited_to_user(self):
        """Test retrieving recipes for user"""
        # A second user's recipe must not leak into the first user's list.
        user2 = get_user_model().objects.create_user(
            'other@me.com',
            'password123'
        )
        sample_recipe(user=user2)
        sample_recipe(user=self.user)

        res = self.client.get(RECIPES_URL)

        recipes = Recipe.objects.filter(user=self.user)
        serializer = RecipeSerializer(recipes, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        # Only the authenticated user's single recipe is returned.
        self.assertEqual(len(res.data), 1)
        self.assertEqual(res.data, serializer.data)

    def test_view_recipe_detail(self):
        """Test viewing a recipe detail"""
        recipe = sample_recipe(user=self.user)
        recipe.tags.add(sample_tag(user=self.user))
        recipe.ingredients.add(sample_ingredient(user=self.user))

        url = detail_url(recipe.id)
        res = self.client.get(url)

        # Detail endpoint uses the richer detail serializer (nested tags/ingredients).
        serializer = RecipeDetailSerializer(recipe)
        self.assertEqual(res.data, serializer.data)
7e911e15153f49dd8ca95a5b2fd5247a819d03f7 | 3,383 | py | Python | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_08_01_preview/_configuration.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_08_01_preview/_configuration.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2018_08_01_preview/_configuration.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
from azure.core.credentials import TokenCredential
VERSION = "unknown"
class ContainerServiceClientConfiguration(Configuration):
    """Configuration for ContainerServiceClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: Subscription credentials which uniquely identify Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
    :type subscription_id: str
    """

    def __init__(
        self,
        credential, # type: "TokenCredential"
        subscription_id, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        # Fail fast on missing required parameters.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        super(ContainerServiceClientConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        # API version is pinned for this preview client package (generated code).
        self.api_version = "2018-08-01-preview"
        # Default AAD scope for ARM; callers may override with 'credential_scopes'.
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-containerservice/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs # type: Any
    ):
        # type: (...) -> None
        # Build the HTTP pipeline policies, honouring caller-supplied overrides.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Only create a default bearer-token policy when none was provided.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
371089936ef59971826dc076482d6683f8f08569 | 2,405 | py | Python | saleor/discount/migrations/0010_auto_20180724_1251.py | acabezasg/urpi-master | 7c9cd0fbe6d89dad70652482712ca38b21ba6f84 | [
"BSD-3-Clause"
] | 6 | 2019-01-06T08:39:20.000Z | 2022-03-04T18:07:47.000Z | saleor/discount/migrations/0010_auto_20180724_1251.py | acabezasg/urpi-master | 7c9cd0fbe6d89dad70652482712ca38b21ba6f84 | [
"BSD-3-Clause"
] | 64 | 2019-02-11T17:02:05.000Z | 2021-06-25T15:16:57.000Z | saleor/discount/migrations/0010_auto_20180724_1251.py | acabezasg/urpi-master | 7c9cd0fbe6d89dad70652482712ca38b21ba6f84 | [
"BSD-3-Clause"
] | 2 | 2019-01-08T02:32:42.000Z | 2021-07-05T14:05:55.000Z | # Generated by Django 2.0.3 on 2018-07-24 17:51
import datetime
from django.db import migrations, models
import django_countries.fields
class Migration(migrations.Migration):
    # Auto-generated schema migration for the discount app (2018-07-24).
    # Migration files must stay byte-stable once applied; only comments added.

    dependencies = [
        ('product', '0065_auto_20180719_0520'),
        ('discount', '0009_auto_20180719_0520'),
    ]

    operations = [
        # Rename voucher 'limit' to the clearer 'min_amount_spent'.
        migrations.RenameField(
            model_name='voucher',
            old_name='limit',
            new_name='min_amount_spent',
        ),
        # Drop single-target fields in favour of the many-to-many fields below.
        migrations.RemoveField(
            model_name='voucher',
            name='apply_to',
        ),
        migrations.RemoveField(
            model_name='voucher',
            name='category',
        ),
        migrations.RemoveField(
            model_name='voucher',
            name='product',
        ),
        # Sales gain an optional validity window.
        migrations.AddField(
            model_name='sale',
            name='end_date',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='sale',
            name='start_date',
            field=models.DateField(default=datetime.date.today),
        ),
        migrations.AddField(
            model_name='voucher',
            name='apply_once_per_order',
            field=models.BooleanField(default=False),
        ),
        # Vouchers can now target multiple categories/collections/products
        # and be restricted to a set of countries.
        migrations.AddField(
            model_name='voucher',
            name='categories',
            field=models.ManyToManyField(blank=True, to='product.Category'),
        ),
        migrations.AddField(
            model_name='voucher',
            name='collections',
            field=models.ManyToManyField(blank=True, to='product.Collection'),
        ),
        migrations.AddField(
            model_name='voucher',
            name='countries',
            field=django_countries.fields.CountryField(blank=True, max_length=749, multiple=True),
        ),
        migrations.AddField(
            model_name='voucher',
            name='products',
            field=models.ManyToManyField(blank=True, to='product.Product'),
        ),
        migrations.AlterField(
            model_name='voucher',
            name='type',
            field=models.CharField(choices=[('value', 'All products'), ('product', 'Specific products'), ('collection', 'Specific collections of products'), ('category', 'Specific categories of products'), ('shipping', 'Shipping')], default='value', max_length=20),
        ),
    ]
| 32.5 | 265 | 0.567568 |
8edef83b90fe064eda0287db5647da839efc3cc5 | 2,992 | py | Python | zstackwoodpecker/zstackwoodpecker/operations/primarystorage_operations.py | hyhhui/zstack-woodpecker | ac36ae033cc521e2f877763de3ff55e4762e3ae0 | [
"Apache-2.0"
] | null | null | null | zstackwoodpecker/zstackwoodpecker/operations/primarystorage_operations.py | hyhhui/zstack-woodpecker | ac36ae033cc521e2f877763de3ff55e4762e3ae0 | [
"Apache-2.0"
] | null | null | null | zstackwoodpecker/zstackwoodpecker/operations/primarystorage_operations.py | hyhhui/zstack-woodpecker | ac36ae033cc521e2f877763de3ff55e4762e3ae0 | [
"Apache-2.0"
] | 2 | 2020-03-12T03:11:28.000Z | 2021-07-26T01:57:58.000Z | '''
All primary_storage operations for test.
@author: Youyk
'''
import apibinding.api_actions as api_actions
import zstackwoodpecker.test_util as test_util
import account_operations
import apibinding.inventory as inventory
def create_nfs_primary_storage(primary_storage_option, session_uuid=None):
    """Create an NFS primary storage described by *primary_storage_option*."""
    opt = primary_storage_option
    action = api_actions.AddNfsPrimaryStorageAction()
    action.name = opt.get_name()
    action.description = opt.get_description()
    action.type = opt.get_type()
    action.url = opt.get_url()
    action.zoneUuid = opt.get_zone_uuid()
    action.timeout = 30000
    evt = account_operations.execute_action_with_session(action, session_uuid)
    test_util.action_logger('Create Primary Storage [uuid:] %s [name:] %s' % \
            (evt.inventory.uuid, action.name))
    return evt.inventory
def delete_primary_storage(primary_storage_uuid, session_uuid=None):
    """Delete a primary storage.

    Deleting a PS also deletes every VM and volume that uses it.
    """
    action = api_actions.DeletePrimaryStorageAction()
    action.timeout = 600000
    action.uuid = primary_storage_uuid
    test_util.action_logger('Delete Primary Storage [uuid:] %s' % primary_storage_uuid)
    evt = account_operations.execute_action_with_session(action, session_uuid)
    return evt.inventory
def attach_primary_storage(primary_storage_uuid, cluster_uuid, session_uuid=None):
    """Attach a primary storage to a cluster."""
    action = api_actions.AttachPrimaryStorageToClusterAction()
    action.primaryStorageUuid = primary_storage_uuid
    action.clusterUuid = cluster_uuid
    action.timeout = 30000
    test_util.action_logger('Attach Primary Storage [uuid:] %s to Cluster [uuid:] %s' % \
            (primary_storage_uuid, cluster_uuid))
    evt = account_operations.execute_action_with_session(action, session_uuid)
    return evt.inventory
def detach_primary_storage(primary_storage_uuid, cluster_uuid, \
        session_uuid=None):
    """Detach a primary storage from a cluster.

    Detaching a PS stops every VM whose volumes live on it.
    """
    action = api_actions.DetachPrimaryStorageFromClusterAction()
    action.primaryStorageUuid = primary_storage_uuid
    action.clusterUuid = cluster_uuid
    action.timeout = 300000
    test_util.action_logger('Detach Primary Storage [uuid:] %s from Cluster [uuid:] %s' % \
            (primary_storage_uuid, cluster_uuid))
    evt = account_operations.execute_action_with_session(action, session_uuid)
    return evt.inventory
def change_primary_storage_state(primary_storage_uuid, state, session_uuid=None):
    """Fire a state-change event (e.g. enable/disable) on a primary storage."""
    action = api_actions.ChangePrimaryStorageStateAction()
    action.stateEvent = state
    action.uuid = primary_storage_uuid
    action.timeout = 300000
    test_util.action_logger('Change Primary Storage [uuid:] %s to [state:] %s' \
            % (primary_storage_uuid, state))
    evt = account_operations.execute_action_with_session(action, session_uuid)
    return evt.inventory
| 42.742857 | 92 | 0.745321 |
f8f998223de5894e4c51e0d34cb69b88a879ff4b | 228 | py | Python | MoogTools/generateSpectrum.py | soylentdeen/MoogPy | 9485a7e302ef4d4339013f27672d1d5e7059a41f | [
"MIT"
] | 5 | 2015-08-21T17:18:15.000Z | 2021-09-03T15:55:35.000Z | MoogTools/generateSpectrum.py | soylentdeen/MoogPy | 9485a7e302ef4d4339013f27672d1d5e7059a41f | [
"MIT"
] | null | null | null | MoogTools/generateSpectrum.py | soylentdeen/MoogPy | 9485a7e302ef4d4339013f27672d1d5e7059a41f | [
"MIT"
] | 4 | 2016-03-28T09:39:44.000Z | 2019-07-20T07:47:39.000Z | #!/usr/bin/python
import MoogTools
import sys

# Usage: generateSpectrum.py <moogPyConfigFile> <flavor>
moogPyConfigFile = sys.argv[1]  # path to the MoogPy configuration file
flavor = sys.argv[2]            # used as the fileBase label for the outputs
# Build the MoogStokes driver on the 'Alpha' instance with a progress bar,
# then synthesize the spectrum, keeping the raw output (saveRaw=True).
Moog = MoogTools.MoogStokes(moogPyConfigFile, fileBase=flavor, moogInstance='Alpha', progressBar = True)
Moog.run(saveRaw=True)
| 25.333333 | 105 | 0.776316 |
9e137dde54ae747725bd6fd27343f60e8c3bf4fd | 435 | py | Python | Chapter9/lesson60/pybotweb.py | beproud/yasashiipython2e | 00afb264ec8428b0149ac12854c8e11c30bee338 | [
"MIT"
] | null | null | null | Chapter9/lesson60/pybotweb.py | beproud/yasashiipython2e | 00afb264ec8428b0149ac12854c8e11c30bee338 | [
"MIT"
] | 2 | 2020-08-03T02:31:35.000Z | 2020-08-03T03:02:25.000Z | Chapter9/lesson60/pybotweb.py | beproud/yasashiipython2e | 00afb264ec8428b0149ac12854c8e11c30bee338 | [
"MIT"
] | null | null | null | from bottle import route, run, template, request
from pybot import pybot
@route('/hello')
def hello():
    # GET: render the chat template with empty input/output fields.
    return template('pybot_template', input_text='', output_text='')
@route('/hello', method='POST')
def do_hello():
    """POST handler: feed the submitted text to pybot and re-render the page."""
    user_text = request.forms.input_text
    bot_reply = pybot(user_text)
    return template('pybot_template', input_text=user_text, output_text=bot_reply)
run(host='localhost', port=8080, debug=True)
| 24.166667 | 85 | 0.731034 |
e9044a0e04debc89ba9d53f3668e45a4b059c57d | 1,461 | py | Python | bin/narrowpeak2summitFlankingBed.py | odingsy/NGStoolkit | 68d73810351550b9ba75f9184f26bc8e55708fcc | [
"MIT"
] | 2 | 2018-05-05T06:24:51.000Z | 2021-07-04T22:24:13.000Z | bin/narrowpeak2summitFlankingBed.py | odingsy/NGStoolkit | 68d73810351550b9ba75f9184f26bc8e55708fcc | [
"MIT"
] | null | null | null | bin/narrowpeak2summitFlankingBed.py | odingsy/NGStoolkit | 68d73810351550b9ba75f9184f26bc8e55708fcc | [
"MIT"
] | 2 | 2020-12-27T22:02:29.000Z | 2021-05-28T20:28:26.000Z | #!/usr/bin/env python
import sys
import os
import argparse
import bed
import generalUtils
# CLI: read a narrowPeak-style BED, take each peak's summit
# (start + field-10 offset) and emit a window of +/- windowSize around it.
parser = argparse.ArgumentParser(description='takes bed as input, get the middle point and extend it to both sides')
parser.add_argument('-i', nargs='?', type=argparse.FileType('r'), default=sys.stdin, help='input')
parser.add_argument('-o', nargs='?', type=argparse.FileType('w'), default=sys.stdout, help='output')
parser.add_argument('-w', required= True, help='windowSize')
parser.add_argument('-g', required= False, default=False, help='genomeFile')
# NOTE(review): --randomMid is parsed but never consulted below --
# getInterval's `randomness` parameter keeps its default; confirm intent.
parser.add_argument('--randomMid', required= False, action='store_true', help='for cases of .5 middle point, randomly select between positions 0 or 1')
args = parser.parse_args()
bedFile = args.i
output = args.o
windowSize = int(args.w)
if args.g:
    # Optional genome file: one "<chrom><TAB><size>" entry per line,
    # used for chromosome-boundary checks in getInterval.
    chromosomeSizes = {}
    for line in open(args.g, 'r'):
        ll = line.split('\t')
        chromosomeSizes[ll[0]] = int(ll[1])
def getInterval(line, randomness=False):
    """Return a BED line spanning summit +/- windowSize, or False when -g was
    given and the window would fall outside the chromosome bounds."""
    bedLine = bed.bedline(line)
    # Summit position = interval start + narrowPeak summit offset (field 10).
    middlePoint = bedLine.start() + int(bedLine.getField(10))
    start = middlePoint - windowSize
    end = middlePoint + windowSize
    if args.g:
        chromosome = bedLine.chromosome()
        chrEnd = chromosomeSizes[chromosome]
        if start > 0 and end < chrEnd:
            return bedLine.newline(start, end)
        return False
    return bedLine.newline(start, end)
generalUtils.lineBasedFileOperation(bedFile, output, getInterval, [])
| 36.525 | 151 | 0.698152 |
3690099749126536bfe5ba28cd63acdfdca500a2 | 1,466 | py | Python | tools/generate_taint_models/get_REST_api_sources.py | s-pace/pyre-check | 2b71dcf22e4672567cfe0dfef356f11646d66244 | [
"MIT"
] | 5 | 2019-02-14T19:46:47.000Z | 2020-01-16T05:48:45.000Z | tools/generate_taint_models/get_REST_api_sources.py | s-pace/pyre-check | 2b71dcf22e4672567cfe0dfef356f11646d66244 | [
"MIT"
] | null | null | null | tools/generate_taint_models/get_REST_api_sources.py | s-pace/pyre-check | 2b71dcf22e4672567cfe0dfef356f11646d66244 | [
"MIT"
] | 2 | 2019-02-14T19:46:23.000Z | 2020-07-13T03:53:04.000Z | # Copyright (c) 2016-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from typing import Callable, Iterable, List, Optional
from .function_tainter import taint_functions
from .model import CallableModel
from .model_generator import CallableModelGenerator
from .view_generator import DjangoUrls, get_all_views
class RESTApiSourceGenerator(CallableModelGenerator):
    """Generates taint models marking Django view parameters as taint sources."""

    def __init__(
        self,
        django_urls: DjangoUrls,
        whitelisted_classes: Optional[List[str]] = None,
        whitelisted_views: Optional[List[str]] = None,
        taint_annotation: str = "TaintSource[UserControlled]",
    ) -> None:
        self.django_urls: DjangoUrls = django_urls
        # Whitelists default to empty lists rather than None for easy iteration.
        self.whitelisted_classes: List[str] = whitelisted_classes or []
        self.whitelisted_views: List[str] = whitelisted_views or []
        self.taint_annotation: str = taint_annotation

    def gather_functions_to_model(self) -> Iterable[Callable[..., object]]:
        """Collect every view callable registered in the Django URL patterns."""
        return get_all_views(self.django_urls)

    def compute_models(
        self, functions_to_model: Iterable[Callable[..., object]]
    ) -> Iterable[CallableModel]:
        """Annotate the gathered views with the configured taint annotation."""
        return taint_functions(
            functions_to_model,
            whitelisted_classes=self.whitelisted_classes,
            whitelisted_views=self.whitelisted_views,
            taint_annotation=self.taint_annotation,
        )
| 34.904762 | 75 | 0.71487 |
d79795c5170cf6305c9f5a93281d34495c05ebf6 | 3,132 | py | Python | sdc/tests/test_series_map.py | dlee992/sdc | 1ebf55c00ef38dfbd401a70b3945e352a5a38b87 | [
"BSD-2-Clause"
] | 540 | 2017-06-19T16:29:24.000Z | 2019-05-21T09:30:07.000Z | sdc/tests/test_series_map.py | dlee992/sdc | 1ebf55c00ef38dfbd401a70b3945e352a5a38b87 | [
"BSD-2-Clause"
] | 389 | 2019-10-30T18:56:46.000Z | 2022-03-09T08:21:36.000Z | sdc/tests/test_series_map.py | dlee992/sdc | 1ebf55c00ef38dfbd401a70b3945e352a5a38b87 | [
"BSD-2-Clause"
] | 36 | 2017-06-19T16:29:15.000Z | 2019-04-26T09:22:39.000Z | # *****************************************************************************
# Copyright (c) 2020, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
import pandas as pd
import unittest
from sdc.tests.test_base import TestCase
from sdc.tests.test_utils import skip_numba_jit
GLOBAL_VAL = 2
class TestSeries_map(TestCase):
    # Each test compiles `test_impl` with the SDC jit and checks the jitted
    # result matches plain pandas for Series.map.

    def test_series_map1(self):
        # map with a simple elementwise lambda.
        def test_impl(S):
            return S.map(lambda a: 2 * a)
        hpat_func = self.jit(test_impl)

        S = pd.Series([1.0, 2., 3., 4., 5.])
        pd.testing.assert_series_equal(hpat_func(S), test_impl(S))

    def test_series_map_global1(self):
        # the lambda closes over a module-level global (GLOBAL_VAL).
        def test_impl(S):
            return S.map(lambda a: a + GLOBAL_VAL)
        hpat_func = self.jit(test_impl)

        S = pd.Series([1.0, 2., 3., 4., 5.])
        pd.testing.assert_series_equal(hpat_func(S), test_impl(S))

    @skip_numba_jit
    def test_series_map_tup1(self):
        # tuple-valued map results are not yet supported under numba jit.
        def test_impl(S):
            return S.map(lambda a: (a, 2 * a))
        hpat_func = self.jit(test_impl)

        S = pd.Series([1.0, 2., 3., 4., 5.])
        pd.testing.assert_series_equal(hpat_func(S), test_impl(S))

    @skip_numba_jit
    def test_series_map_tup_map1(self):
        # chained maps over tuple elements; also skipped under numba jit.
        def test_impl(S):
            A = S.map(lambda a: (a, 2 * a))
            return A.map(lambda a: a[1])
        hpat_func = self.jit(test_impl)

        S = pd.Series([1.0, 2., 3., 4., 5.])
        pd.testing.assert_series_equal(hpat_func(S), test_impl(S))

    def test_series_map_dict(self):
        # dict argument: values not present as keys map to NaN.
        def test_impl(S):
            return S.map({2.: 42., 4.: 3.14})
        hpat_func = self.jit(test_impl)

        S = pd.Series([1., 2., 3., 4., 5.])
        pd.testing.assert_series_equal(hpat_func(S), test_impl(S))
if __name__ == "__main__":
unittest.main()
| 36.847059 | 79 | 0.644955 |
13e411d8904f76c2270201df68c2306cb5da3e85 | 1,443 | py | Python | excluir.py | hilbertspace05/Tkinter-CRUD | b1b3a3b7557b0e5cb203e2b5e2ae4d3a29e9a5cb | [
"MIT"
] | null | null | null | excluir.py | hilbertspace05/Tkinter-CRUD | b1b3a3b7557b0e5cb203e2b5e2ae4d3a29e9a5cb | [
"MIT"
] | null | null | null | excluir.py | hilbertspace05/Tkinter-CRUD | b1b3a3b7557b0e5cb203e2b5e2ae4d3a29e9a5cb | [
"MIT"
] | null | null | null | import tkinter as tk
import mysql.connector
from mysql.connector import Error
import paginainicial
# Shared heading font for all frames.
LARGE_FONT = ("Verdana", 12)

# Module-level MySQL connection shared by the UI.
# NOTE(review): if connect() fails, the error is only printed and `cnx` stays
# undefined, so later cnx.cursor() calls raise NameError -- consider re-raising.
try:
    cnx = mysql.connector.connect(user='root', password='',
                                  host='127.0.0.1',
                                  database='crud')
except Error as erro:
    print(erro)
class Excluir(tk.Frame):
    """Tkinter frame that deletes rows from `usuarios` matching a typed name."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        label = tk.Label(self, text="Excluir Dados", font=LARGE_FONT)
        label.pack(pady=10, padx=10)

        def excluirvalor():
            # Delete every row whose Nome equals the entry's current content.
            prnome = NomeE.get()
            try:
                cursor = cnx.cursor()
                # Security fix: parameterized query instead of str.format(),
                # which allowed SQL injection through the name field.
                cursor.execute("DELETE FROM usuarios WHERE Nome = %s", (prnome,))
                cnx.commit()
                Resultado['text'] = "Dados excluídos com sucesso!"
            except Error:
                Resultado['text'] = "Aconteceu um erro."

        NomeL = tk.Label(self, text="Nome")
        NomeL.pack()
        NomeE = tk.Entry(self, bd=5)
        NomeE.pack()
        Resultado = tk.Label(self,text="")
        Resultado.pack()
        button2 = tk.Button(self, text="Excluir", command=excluirvalor)
        button2.pack()
        button1 = tk.Button(self, text="Voltar",
                            command=lambda: controller.show_frame(paginainicial.PaginaInicial))
        button1.pack()
eb691f8835f16a98a745e79f5ef9c8edef06951d | 1,787 | py | Python | internal/notes/builtin-SAVE/packages/libxres/package.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | 1 | 2019-01-17T20:07:19.000Z | 2019-01-17T20:07:19.000Z | internal/notes/builtin-SAVE/packages/libxres/package.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | null | null | null | internal/notes/builtin-SAVE/packages/libxres/package.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | 2 | 2019-08-06T18:13:57.000Z | 2021-11-05T18:19:49.000Z | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libxres(AutotoolsPackage):
    """libXRes - X-Resource extension client library."""

    homepage = "http://cgit.freedesktop.org/xorg/lib/libXRes"
    url = "https://www.x.org/archive/individual/lib/libXres-1.0.7.tar.gz"

    # Second argument is the tarball's md5 checksum.
    version('1.0.7', '7fad9ab34201bb4adffcbf0cd7e87a89')

    # Link-time X libraries.
    depends_on('libx11')
    depends_on('libxext')

    # Protocol headers and build tooling are only needed at build time.
    depends_on('xextproto', type='build')
    depends_on('resourceproto@1.0:', type='build')
    depends_on('pkg-config@0.9.0:', type='build')
    depends_on('util-macros', type='build')
| 41.55814 | 78 | 0.672636 |
cc6b0211b8d3c2ab70408809e8859fadeb186571 | 1,271 | py | Python | MCTSnet/models/backup.py | haixuanTao/MCTSnet | bac52fcba522291e07239bc998ff0441c8258752 | [
"MIT"
] | 15 | 2020-02-04T21:41:25.000Z | 2022-03-28T16:30:48.000Z | MCTSnet/models/backup.py | haixuanTao/MCTSnet | bac52fcba522291e07239bc998ff0441c8258752 | [
"MIT"
] | 9 | 2019-04-22T08:53:07.000Z | 2022-03-11T23:41:52.000Z | MCTSnet/models/backup.py | haixuanTao/MCTSnet | bac52fcba522291e07239bc998ff0441c8258752 | [
"MIT"
] | 4 | 2019-09-18T12:08:28.000Z | 2020-08-31T19:51:19.000Z | import torch
import torch.nn.functional as F
import torch.nn as nn
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
class BetaMLP(nn.Module):
    # Candidate-update network for the backup step: one linear layer over the
    # concatenation [h_i, h_o, reward, action], followed by ReLU.

    def __init__(self, embeddings_size=128):
        super().__init__()
        # Input width: two embeddings plus the reward and action columns (+2)
        # -- assumes reward/action are (batch, 1) tensors; TODO confirm.
        self.fc1 = nn.Linear(embeddings_size * 2 + 2, embeddings_size).to(device)

    def forward(self, h_i, h_o, reward, action):
        # Concatenate along the feature dimension, flatten, project, ReLU.
        x = torch.cat((h_i, h_o, reward, action), 1)
        h = self.fc1(x.view(x.size(0), -1))
        return F.relu(h)
class BetaGate(nn.Module):
    # Same inputs as BetaMLP, but ends in a softmax over the embedding
    # dimension, producing per-dimension mixing weights for Beta's residual.

    def __init__(self, embeddings_size=128):
        super().__init__()
        self.fc1 = nn.Linear(embeddings_size * 2 + 2, embeddings_size).to(device)

    def forward(self, h_i, h_o, reward, action):
        x = torch.cat((h_i, h_o, reward, action), 1)
        h = self.fc1(x.view(x.size(0), -1))
        # softmax over dim=1 -> weights sum to 1 per sample.
        return F.softmax(h, dim=1)
class Beta(nn.Module):
    """Gated residual backup: blend the candidate update into h_i via a learned gate."""

    def __init__(self, embeddings_size=128):
        super().__init__()
        self.update = BetaMLP(embeddings_size)
        self.gate = BetaGate(embeddings_size)

    def forward(self, h_i, h_o, reward, action):
        inputs = (h_i, h_o, reward, action)
        candidate = self.update(*inputs)
        weight = self.gate(*inputs)
        return h_i + weight * candidate
| 29.55814 | 81 | 0.623918 |
315749810f3e6b11b8ea9db48524de4f0bb862e5 | 4,064 | py | Python | test/test_converters_and_test_searchspaces/test_sample_configuration_spaces.py | Yatoom/ConfigSpace | 45d577c11b2914ffdcb3270d83b42b8a85932147 | [
"BSD-3-Clause"
] | 148 | 2016-04-06T05:01:14.000Z | 2022-03-22T12:38:43.000Z | test/test_converters_and_test_searchspaces/test_sample_configuration_spaces.py | Yatoom/ConfigSpace | 45d577c11b2914ffdcb3270d83b42b8a85932147 | [
"BSD-3-Clause"
] | 222 | 2016-06-29T09:42:42.000Z | 2022-03-29T23:30:35.000Z | test/test_converters_and_test_searchspaces/test_sample_configuration_spaces.py | Yatoom/ConfigSpace | 45d577c11b2914ffdcb3270d83b42b8a85932147 | [
"BSD-3-Clause"
] | 80 | 2016-04-20T02:47:39.000Z | 2022-02-24T13:02:14.000Z | # Copyright (c) 2014-2016, ConfigSpace developers
# Matthias Feurer
# Katharina Eggensperger
# and others (see commit history).
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import unittest
import ConfigSpace
import ConfigSpace.util
import ConfigSpace.read_and_write.pcs as pcs_parser
import ConfigSpace.read_and_write.pcs_new as pcs_new_parser
class ExampleSearchSpacesTest(unittest.TestCase):
    # Intentionally empty: one test method per .pcs file is attached
    # dynamically below via setattr(..., generate(full_path)).
    pass
def generate(configuration_space_path):
    """Build a test method exercising one .pcs configuration-space file."""
    def run_test(self):
        # Try the old pcs format first; fall back to the new format parser.
        try:
            with open(configuration_space_path) as fh:
                cs = pcs_parser.read(fh)
        except Exception:
            with open(configuration_space_path) as fh:
                cs = pcs_new_parser.read(fh)

        # The default configuration and its one-exchange neighbours must all
        # validate rigorously against the space.
        default = cs.get_default_configuration()
        cs._check_configuration_rigorous(default)
        for i in range(10):
            neighborhood = ConfigSpace.util.get_one_exchange_neighbourhood(
                default, seed=i)

            for shuffle, n in enumerate(neighborhood):
                n.is_valid_configuration()
                cs._check_configuration_rigorous(n)
                if shuffle == 10:  # cap the work done per seed
                    break

        # Sample a little bit
        for i in range(10):
            cs.seed(i)
            configurations = cs.sample_configuration(size=5)
            for j, c in enumerate(configurations):
                c.is_valid_configuration()
                cs._check_configuration_rigorous(c)
                neighborhood = ConfigSpace.util.get_one_exchange_neighbourhood(
                    c, seed=i)

                for shuffle, n in enumerate(neighborhood):
                    n.is_valid_configuration()
                    cs._check_configuration_rigorous(n)
                    if shuffle == 20:  # cap the work done per sample
                        break
    return run_test
# Locate the sibling ``test_searchspaces`` directory relative to this file.
this_file = os.path.abspath(__file__)
this_directory = os.path.dirname(this_file)
configuration_space_path = os.path.join(this_directory,
                                        "..", "test_searchspaces")
configuration_space_path = os.path.abspath(configuration_space_path)
# Sorted so the generated test names are deterministic across runs.
pcs_files = sorted(os.listdir(configuration_space_path))
# Attach one generated ``test_*`` method per .pcs file; dots are replaced
# because method names cannot contain them.
for pcs_file in pcs_files:
    if '.pcs' in pcs_file:
        full_path = os.path.join(configuration_space_path, pcs_file)
        setattr(ExampleSearchSpacesTest, 'test_%s' % pcs_file.replace('.', '_'),
                generate(full_path))
if __name__ == '__main__':
    # When invoked directly, run just one representative search-space test.
    single_test = ExampleSearchSpacesTest(
        methodName='test_auto-sklearn_2017_04_pcs')
    suite = unittest.TestSuite()
    suite.addTest(single_test)
    runner = unittest.TextTestRunner().run(suite)
| 41.050505 | 81 | 0.688976 |
adcd57d8e14636885856d1325af237654ecb5ac1 | 22,989 | py | Python | sysinv/sysinv/sysinv/sysinv/openstack/common/rpc/impl_qpid.py | etaivan/stx-config | 281e1f110973f96e077645fb01f67b646fc253cc | [
"Apache-2.0"
] | null | null | null | sysinv/sysinv/sysinv/sysinv/openstack/common/rpc/impl_qpid.py | etaivan/stx-config | 281e1f110973f96e077645fb01f67b646fc253cc | [
"Apache-2.0"
] | null | null | null | sysinv/sysinv/sysinv/sysinv/openstack/common/rpc/impl_qpid.py | etaivan/stx-config | 281e1f110973f96e077645fb01f67b646fc253cc | [
"Apache-2.0"
] | 1 | 2021-01-05T16:24:58.000Z | 2021-01-05T16:24:58.000Z | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# Copyright 2011 - 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import itertools
import time
import uuid
import eventlet
import greenlet
from oslo_config import cfg
from sysinv.openstack.common.gettextutils import _
from sysinv.openstack.common import importutils
from sysinv.openstack.common import jsonutils
from sysinv.openstack.common import log as logging
from sysinv.openstack.common.rpc import amqp as rpc_amqp
from sysinv.openstack.common.rpc import common as rpc_common
# qpid is an optional dependency; these are None when it is not installed
# and Connection.__init__ raises ImportError in that case.
qpid_messaging = importutils.try_import("qpid.messaging")
qpid_exceptions = importutils.try_import("qpid.messaging.exceptions")
LOG = logging.getLogger(__name__)
# Configuration options controlling how the qpid broker is reached.
qpid_opts = [
    cfg.StrOpt('qpid_hostname',
               default='localhost',
               help='Qpid broker hostname'),
    cfg.IntOpt('qpid_port',
               default=5672,
               help='Qpid broker port'),
    cfg.ListOpt('qpid_hosts',
                default=['$qpid_hostname:$qpid_port'],
                help='Qpid HA cluster host:port pairs'),
    cfg.StrOpt('qpid_username',
               default='',
               help='Username for qpid connection'),
    cfg.StrOpt('qpid_password',
               default='',
               help='Password for qpid connection',
               secret=True),
    cfg.StrOpt('qpid_sasl_mechanisms',
               default='',
               help='Space separated list of SASL mechanisms to use for auth'),
    cfg.IntOpt('qpid_heartbeat',
               default=60,
               help='Seconds between connection keepalive heartbeats'),
    cfg.StrOpt('qpid_protocol',
               default='tcp',
               help="Transport to use, either 'tcp' or 'ssl'"),
    cfg.BoolOpt('qpid_tcp_nodelay',
                default=True,
                help='Disable Nagle algorithm'),
]
cfg.CONF.register_opts(qpid_opts)
class ConsumerBase(object):
    """Consumer base class."""

    def __init__(self, session, callback, node_name, node_opts,
                 link_name, link_opts):
        """Declare a queue on an amqp session.

        'session' is the amqp session to use
        'callback' is the callback to call when messages are received
        'node_name' is the first part of the Qpid address string, before ';'
        'node_opts' will be applied to the "x-declare" section of "node"
                    in the address string.
        'link_name' goes into the "name" field of the "link" in the address
                    string
        'link_opts' will be applied to the "x-declare" section of "link"
                    in the address string.
        """
        self.callback = callback
        self.receiver = None
        self.session = None

        # Base qpid address options; node/link x-declare sections are merged
        # with the caller-supplied opts below before being serialized.
        addr_opts = {
            "create": "always",
            "node": {
                "type": "topic",
                "x-declare": {
                    "durable": True,
                    "auto-delete": True,
                },
            },
            "link": {
                "name": link_name,
                "durable": True,
                "x-declare": {
                    "durable": False,
                    "auto-delete": True,
                    "exclusive": False,
                },
            },
        }
        addr_opts["node"]["x-declare"].update(node_opts)
        addr_opts["link"]["x-declare"].update(link_opts)

        # qpid address strings are "<node> ; <json options>".
        self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts))

        self.reconnect(session)

    def reconnect(self, session):
        """Re-declare the receiver after a qpid reconnect"""
        self.session = session
        self.receiver = session.receiver(self.address)
        # Prefetch at most one message at a time.
        self.receiver.capacity = 1

    def consume(self):
        """Fetch the message and pass it to the callback object"""
        message = self.receiver.fetch()
        try:
            msg = rpc_common.deserialize_msg(message.content)
            self.callback(msg)
        except Exception:
            LOG.exception(_("Failed to process message... skipping it."))
        finally:
            # Always ack, even on failure, so the message is not redelivered
            # in an endless loop.
            self.session.acknowledge(message)

    def get_receiver(self):
        # Exposed so Connection can map a ready receiver back to its consumer.
        return self.receiver
class DirectConsumer(ConsumerBase):
    """Consumer bound to a per-message 'direct' node (RPC replies)."""

    def __init__(self, conf, session, msg_id, callback):
        """Declare the direct queue used for call/multicall responses.

        :param session: amqp session to declare the receiver on
        :param msg_id: message id, used both as node name and link name
        :param callback: callable invoked for every received message
        """
        node_name = "%s/%s" % (msg_id, msg_id)
        node_opts = {"type": "direct"}
        link_opts = {"exclusive": True}
        super(DirectConsumer, self).__init__(session, callback, node_name,
                                             node_opts, msg_id, link_opts)
class TopicConsumer(ConsumerBase):
    """Consumer bound to a shared 'topic' exchange."""

    def __init__(self, conf, session, topic, callback, name=None,
                 exchange_name=None):
        """Declare a topic queue.

        :param session: the amqp session to use
        :param topic: is the topic to listen on
        :paramtype topic: str
        :param callback: the callback to call when messages are received
        :param name: optional queue name, defaults to topic
        """
        exchange = exchange_name or rpc_amqp.get_control_exchange(conf)
        queue_name = name or topic
        super(TopicConsumer, self).__init__(session, callback,
                                            "%s/%s" % (exchange, topic),
                                            {}, queue_name, {})
class FanoutConsumer(ConsumerBase):
    """Consumer bound to a per-topic 'fanout' exchange."""

    def __init__(self, conf, session, topic, callback):
        """Declare a fanout queue with a unique, exclusive link name.

        :param session: the amqp session to use
        :param topic: the topic to listen on
        :param callback: callable invoked for every received message
        """
        exchange = "%s_fanout" % topic
        # Each consumer gets its own uniquely named, exclusive link so every
        # subscriber receives a copy of each message.
        unique_link = "%s_fanout_%s" % (topic, uuid.uuid4().hex)
        super(FanoutConsumer, self).__init__(
            session, callback, exchange,
            {"durable": False, "type": "fanout"},
            unique_link,
            {"exclusive": True})
class Publisher(object):
    """Common machinery shared by all qpid publishers."""

    def __init__(self, session, node_name, node_opts=None):
        """Build the qpid address string and create the initial sender.

        :param session: amqp session used to create the sender
        :param node_name: first part of the qpid address (before ';')
        :param node_opts: extra options merged into the node "x-declare"
        """
        self.sender = None
        self.session = session

        x_declare = {
            "durable": False,
            # auto-delete isn't implemented for exchanges in qpid,
            # but put in here anyway
            "auto-delete": True,
        }
        if node_opts:
            x_declare.update(node_opts)
        addr_opts = {
            "create": "always",
            "node": {
                "type": "topic",
                "x-declare": x_declare,
            },
        }
        self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts))
        self.reconnect(session)

    def reconnect(self, session):
        """Recreate the sender after a (re)connection."""
        self.sender = session.sender(self.address)

    def send(self, msg):
        """Hand *msg* to the underlying qpid sender."""
        self.sender.send(msg)
class DirectPublisher(Publisher):
    """Publisher for 'direct' (reply) messages."""

    def __init__(self, conf, session, msg_id):
        """Address the per-message direct node named by *msg_id*."""
        # NOTE(review): the node type is declared as "Direct" (capitalised)
        # upstream; preserved verbatim here -- confirm against qpid address
        # semantics before changing.
        node_opts = {"type": "Direct"}
        super(DirectPublisher, self).__init__(session, msg_id, node_opts)
class TopicPublisher(Publisher):
    """Publisher for 'topic' messages on the control exchange."""

    def __init__(self, conf, session, topic):
        """Address the configured control exchange with *topic*."""
        exchange = rpc_amqp.get_control_exchange(conf)
        node_name = "%s/%s" % (exchange, topic)
        super(TopicPublisher, self).__init__(session, node_name)
class FanoutPublisher(Publisher):
    """Publisher for 'fanout' messages."""

    def __init__(self, conf, session, topic):
        """Address the per-topic fanout exchange."""
        node_name = "%s_fanout" % topic
        super(FanoutPublisher, self).__init__(session, node_name,
                                              {"type": "fanout"})
class NotifyPublisher(Publisher):
    """Publisher for notification messages (durable topic node)."""

    def __init__(self, conf, session, topic):
        """Address the control exchange with a durable node declaration."""
        exchange = rpc_amqp.get_control_exchange(conf)
        node_name = "%s/%s" % (exchange, topic)
        super(NotifyPublisher, self).__init__(session, node_name,
                                              {"durable": True})
class Connection(object):
    """Connection object."""

    # Module-wide connection pool, lazily populated by rpc_amqp helpers.
    pool = None

    def __init__(self, conf, server_params=None):
        """Open a connection to one of the configured qpid brokers.

        :param conf: oslo config object providing the qpid_* options
        :param server_params: optional per-call overrides (hostname, port,
            username, password) used by cast_to_server-style calls
        :raises ImportError: if the optional qpid.messaging package is absent
        """
        if not qpid_messaging:
            raise ImportError("Failed to import qpid.messaging")
        self.session = None
        self.consumers = {}
        self.consumer_thread = None
        self.proxy_callbacks = []
        self.conf = conf
        if server_params and 'hostname' in server_params:
            # NOTE(russellb) This enables support for cast_to_server.
            server_params['qpid_hosts'] = [
                '%s:%d' % (server_params['hostname'],
                           server_params.get('port', 5672))
            ]
        # Defaults come from config; server_params (if any) win.
        params = {
            'qpid_hosts': self.conf.qpid_hosts,
            'username': self.conf.qpid_username,
            'password': self.conf.qpid_password,
        }
        params.update(server_params or {})
        self.brokers = params['qpid_hosts']
        self.username = params['username']
        self.password = params['password']
        # connection_create() only builds the object; reconnect() opens it.
        self.connection_create(self.brokers[0])
        self.reconnect()

    def connection_create(self, broker):
        """Build (but do not open) a qpid Connection to *broker*."""
        # Create the connection - this does not open the connection
        self.connection = qpid_messaging.Connection(broker)
        # Check if flags are set and if so set them for the connection
        # before we call open
        self.connection.username = self.username
        self.connection.password = self.password
        self.connection.sasl_mechanisms = self.conf.qpid_sasl_mechanisms
        # Reconnection is done by self.reconnect()
        self.connection.reconnect = False
        self.connection.heartbeat = self.conf.qpid_heartbeat
        self.connection.transport = self.conf.qpid_protocol
        self.connection.tcp_nodelay = self.conf.qpid_tcp_nodelay

    def _register_consumer(self, consumer):
        # Index consumers by the string form of their receiver so that
        # iterconsume() can map a ready receiver back to its consumer.
        self.consumers[str(consumer.get_receiver())] = consumer

    def _lookup_consumer(self, receiver):
        # Inverse of _register_consumer.
        return self.consumers[str(receiver)]

    def reconnect(self):
        """Handles reconnecting and re-establishing sessions and queues"""
        attempt = 0
        delay = 1
        while True:
            # Close the session if necessary
            if self.connection.opened():
                try:
                    self.connection.close()
                except qpid_exceptions.ConnectionError:
                    pass
            # Cycle through the configured brokers round-robin.
            broker = self.brokers[attempt % len(self.brokers)]
            attempt += 1
            try:
                self.connection_create(broker)
                self.connection.open()
            except qpid_exceptions.ConnectionError as e:
                msg_dict = dict(e=e, delay=delay)
                msg = _("Unable to connect to AMQP server: %(e)s. "
                        "Sleeping %(delay)s seconds") % msg_dict
                LOG.error(msg)
                time.sleep(delay)
                # Exponential backoff capped at 60 seconds.
                delay = min(2 * delay, 60)
            else:
                LOG.info(_('Connected to AMQP server on %s'), broker)
                break
        self.session = self.connection.session()
        if self.consumers:
            consumers = self.consumers
            self.consumers = {}
            # Re-declare every existing consumer on the fresh session.
            for consumer in consumers.values():
                consumer.reconnect(self.session)
                self._register_consumer(consumer)
            LOG.debug(_("Re-established AMQP queues"))

    def ensure(self, error_callback, method, *args, **kwargs):
        """Run *method*, reconnecting and retrying forever on qpid errors."""
        while True:
            try:
                return method(*args, **kwargs)
            except (qpid_exceptions.Empty,
                    qpid_exceptions.ConnectionError) as e:
                if error_callback:
                    error_callback(e)
                self.reconnect()

    def close(self):
        """Close/release this connection"""
        self.cancel_consumer_thread()
        self.wait_on_proxy_callbacks()
        self.connection.close()
        self.connection = None

    def reset(self):
        """Reset a connection so it can be used again"""
        self.cancel_consumer_thread()
        self.wait_on_proxy_callbacks()
        self.session.close()
        self.session = self.connection.session()
        self.consumers = {}

    def declare_consumer(self, consumer_cls, topic, callback):
        """Create a Consumer using the class that was passed in and
        add it to our list of consumers
        """
        def _connect_error(exc):
            log_info = {'topic': topic, 'err_str': str(exc)}
            LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
                        "%(err_str)s") % log_info)

        def _declare_consumer():
            consumer = consumer_cls(self.conf, self.session, topic, callback)
            self._register_consumer(consumer)
            return consumer

        return self.ensure(_connect_error, _declare_consumer)

    def iterconsume(self, limit=None, timeout=None):
        """Return an iterator that will consume from all queues/consumers"""
        def _error_callback(exc):
            if isinstance(exc, qpid_exceptions.Empty):
                LOG.debug(_('Timed out waiting for RPC response: %s') %
                          str(exc))
                raise rpc_common.Timeout()
            else:
                LOG.exception(_('Failed to consume message from queue: %s') %
                              str(exc))

        def _consume():
            nxt_receiver = self.session.next_receiver(timeout=timeout)
            try:
                self._lookup_consumer(nxt_receiver).consume()
            except Exception:
                LOG.exception(_("Error processing message. Skipping it."))

        for iteration in itertools.count(0):
            if limit and iteration >= limit:
                raise StopIteration
            yield self.ensure(_error_callback, _consume)

    def cancel_consumer_thread(self):
        """Cancel a consumer thread"""
        if self.consumer_thread is not None:
            self.consumer_thread.kill()
            try:
                self.consumer_thread.wait()
            except greenlet.GreenletExit:
                pass
            self.consumer_thread = None

    def wait_on_proxy_callbacks(self):
        """Wait for all proxy callback threads to exit."""
        for proxy_cb in self.proxy_callbacks:
            proxy_cb.wait()

    def publisher_send(self, cls, topic, msg):
        """Send to a publisher based on the publisher class"""
        def _connect_error(exc):
            log_info = {'topic': topic, 'err_str': str(exc)}
            LOG.exception(_("Failed to publish message to topic "
                            "'%(topic)s': %(err_str)s") % log_info)

        def _publisher_send():
            # Publishers are created per send; qpid caches under the hood.
            publisher = cls(self.conf, self.session, topic)
            publisher.send(msg)

        return self.ensure(_connect_error, _publisher_send)

    def declare_direct_consumer(self, topic, callback):
        """Create a 'direct' queue.
        In nova's use, this is generally a msg_id queue used for
        responses for call/multicall
        """
        self.declare_consumer(DirectConsumer, topic, callback)

    def declare_topic_consumer(self, topic, callback=None, queue_name=None,
                               exchange_name=None):
        """Create a 'topic' consumer."""
        self.declare_consumer(functools.partial(TopicConsumer,
                                                name=queue_name,
                                                exchange_name=exchange_name,
                                                ),
                              topic, callback)

    def declare_fanout_consumer(self, topic, callback):
        """Create a 'fanout' consumer"""
        self.declare_consumer(FanoutConsumer, topic, callback)

    def direct_send(self, msg_id, msg):
        """Send a 'direct' message"""
        self.publisher_send(DirectPublisher, msg_id, msg)

    def topic_send(self, topic, msg, timeout=None):
        """Send a 'topic' message"""
        #
        # We want to create a message with attributes, e.g. a TTL. We
        # don't really need to keep 'msg' in its JSON format any longer
        # so let's create an actual qpid message here and get some
        # value-add on the go.
        #
        # WARNING: Request timeout happens to be in the same units as
        # qpid's TTL (seconds). If this changes in the future, then this
        # will need to be altered accordingly.
        #
        qpid_message = qpid_messaging.Message(content=msg, ttl=timeout)
        self.publisher_send(TopicPublisher, topic, qpid_message)

    def fanout_send(self, topic, msg):
        """Send a 'fanout' message"""
        self.publisher_send(FanoutPublisher, topic, msg)

    def notify_send(self, topic, msg, **kwargs):
        """Send a notify message on a topic"""
        self.publisher_send(NotifyPublisher, topic, msg)

    def consume(self, limit=None):
        """Consume from all queues/consumers"""
        it = self.iterconsume(limit=limit)
        while True:
            try:
                next(it)
            except StopIteration:
                return

    def consume_in_thread(self):
        """Consumer from all queues/consumers in a greenthread"""
        def _consumer_thread():
            try:
                self.consume()
            except greenlet.GreenletExit:
                return
        # Only one background consumer greenthread per connection.
        if self.consumer_thread is None:
            self.consumer_thread = eventlet.spawn(_consumer_thread)
        return self.consumer_thread

    def create_consumer(self, topic, proxy, fanout=False):
        """Create a consumer that calls a method in a proxy object"""
        proxy_cb = rpc_amqp.ProxyCallback(
            self.conf, proxy,
            rpc_amqp.get_connection_pool(self.conf, Connection))
        self.proxy_callbacks.append(proxy_cb)
        if fanout:
            consumer = FanoutConsumer(self.conf, self.session, topic, proxy_cb)
        else:
            consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb)
        self._register_consumer(consumer)
        return consumer

    def create_worker(self, topic, proxy, pool_name):
        """Create a worker that calls a method in a proxy object"""
        proxy_cb = rpc_amqp.ProxyCallback(
            self.conf, proxy,
            rpc_amqp.get_connection_pool(self.conf, Connection))
        self.proxy_callbacks.append(proxy_cb)
        # Workers share a named queue, so messages are load-balanced among
        # all workers registered with the same pool_name.
        consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb,
                                 name=pool_name)
        self._register_consumer(consumer)
        return consumer

    def join_consumer_pool(self, callback, pool_name, topic,
                           exchange_name=None):
        """Register as a member of a group of consumers for a given topic from
        the specified exchange.
        Exactly one member of a given pool will receive each message.
        A message will be delivered to multiple pools, if more than
        one is created.
        """
        callback_wrapper = rpc_amqp.CallbackWrapper(
            conf=self.conf,
            callback=callback,
            connection_pool=rpc_amqp.get_connection_pool(self.conf,
                                                         Connection),
        )
        self.proxy_callbacks.append(callback_wrapper)
        consumer = TopicConsumer(conf=self.conf,
                                 session=self.session,
                                 topic=topic,
                                 callback=callback_wrapper,
                                 name=pool_name,
                                 exchange_name=exchange_name)
        self._register_consumer(consumer)
        return consumer
def create_connection(conf, new=True):
    """Create a connection, drawing from the shared pool unless *new*."""
    pool = rpc_amqp.get_connection_pool(conf, Connection)
    return rpc_amqp.create_connection(conf, new, pool)
def multicall(conf, context, topic, msg, timeout=None):
    """Make a call that returns multiple times."""
    pool = rpc_amqp.get_connection_pool(conf, Connection)
    return rpc_amqp.multicall(conf, context, topic, msg, timeout, pool)
def call(conf, context, topic, msg, timeout=None):
    """Send a message on a topic and wait for a single response."""
    pool = rpc_amqp.get_connection_pool(conf, Connection)
    return rpc_amqp.call(conf, context, topic, msg, timeout, pool)
def cast(conf, context, topic, msg):
    """Send a message on a topic without waiting for a response."""
    pool = rpc_amqp.get_connection_pool(conf, Connection)
    return rpc_amqp.cast(conf, context, topic, msg, pool)
def fanout_cast(conf, context, topic, msg):
    """Broadcast a message on a fanout exchange, fire-and-forget."""
    pool = rpc_amqp.get_connection_pool(conf, Connection)
    return rpc_amqp.fanout_cast(conf, context, topic, msg, pool)
def cast_to_server(conf, context, server_params, topic, msg):
    """Send a message on a topic to a specific server."""
    pool = rpc_amqp.get_connection_pool(conf, Connection)
    return rpc_amqp.cast_to_server(conf, context, server_params, topic, msg,
                                   pool)
def fanout_cast_to_server(conf, context, server_params, topic, msg):
    """Broadcast a message on a fanout exchange to a specific server."""
    pool = rpc_amqp.get_connection_pool(conf, Connection)
    return rpc_amqp.fanout_cast_to_server(conf, context, server_params, topic,
                                          msg, pool)
def notify(conf, context, topic, msg, envelope):
    """Send a notification event on a topic."""
    pool = rpc_amqp.get_connection_pool(conf, Connection)
    return rpc_amqp.notify(conf, context, topic, msg, pool, envelope)
def cleanup():
    """Dispose of the module-level connection pool."""
    pool = Connection.pool
    return rpc_amqp.cleanup(pool)
| 35.313364 | 79 | 0.583235 |
e47d723179f80b97a4f84a891b34044c8970f7ed | 2,963 | py | Python | run_part_selector.py | SongweiGe/DoodlerGAN | d435d9b3c0579937cd3c22aa2051960ceb921785 | [
"MIT"
] | 92 | 2020-10-02T23:44:29.000Z | 2022-03-22T22:49:35.000Z | run_part_selector.py | SongweiGe/DoodlerGAN | d435d9b3c0579937cd3c22aa2051960ceb921785 | [
"MIT"
] | 4 | 2020-10-03T05:11:24.000Z | 2021-06-23T23:27:05.000Z | run_part_selector.py | SongweiGe/DoodlerGAN | d435d9b3c0579937cd3c22aa2051960ceb921785 | [
"MIT"
] | 11 | 2020-10-03T05:06:26.000Z | 2021-11-06T14:03:24.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
from retry.api import retry_call
from tqdm import tqdm
from part_selector import Trainer, NanException
from datetime import datetime
def train_from_folder(
    data = '../../data',
    results_dir = '../../results',
    models_dir = '../../models',
    name = 'default',
    new = False,
    load_from = -1,
    n_part = 1,
    image_size = 128,
    network_capacity = 16,
    batch_size = 3,
    gradient_accumulate_every = 5,
    num_train_steps = 150000,
    learning_rate = 2e-4,
    num_workers = None,
    save_every = 1000,
    num_image_tiles = 8,
):
    """Train the part-selector model on sketches found under *data*.

    Either resumes from checkpoint *load_from* or, when *new* is set,
    starts from a cleared model directory. Trains until *num_train_steps*
    total steps have been taken, logging every 50 steps.
    """
    trainer = Trainer(
        name,
        results_dir,
        models_dir,
        batch_size = batch_size,
        gradient_accumulate_every = gradient_accumulate_every,
        n_part = n_part,
        image_size = image_size,
        network_capacity = network_capacity,
        lr = learning_rate,
        num_workers = num_workers,
        save_every = save_every,
    )

    if new:
        trainer.clear()
    else:
        trainer.load(load_from)

    trainer.set_data_src(data, name)

    # Only the remaining steps are run when resuming from a checkpoint.
    remaining_steps = num_train_steps - trainer.steps
    for step in tqdm(range(remaining_steps), mininterval=10., desc=f'{name}<{data}>'):
        retry_call(trainer.train, tries=3, exceptions=NanException)
        if step % 50 == 0:
            trainer.print_log()
if __name__ == "__main__":
    # Command-line front-end: every flag mirrors a train_from_folder
    # parameter (same names, same defaults unless overridden below).
    parser = argparse.ArgumentParser()
    parser.add_argument("--data", type=str, default='../../data')
    parser.add_argument("--results_dir", type=str, default='../results')
    parser.add_argument("--models_dir", type=str, default='../models')
    parser.add_argument("--name", type=str, default='default')
    parser.add_argument("--load_from", type=int, default=-1)
    parser.add_argument('--new', action='store_true')
    parser.add_argument('--n_part', type=int, default=1)
    parser.add_argument('--image_size', type=int, default=128)
    parser.add_argument('--network_capacity', type=int, default=16)
    parser.add_argument('--batch_size', type=int, default=64)
    parser.add_argument('--gradient_accumulate_every', type=int, default=1)
    parser.add_argument('--num_train_steps', type=int, default=200000)
    parser.add_argument('--num_workers', type=int, default=None)
    parser.add_argument('--save_every', type=int, default=1000)
    parser.add_argument('--num_image_tiles', type=int, default=8)
    parser.add_argument('--learning_rate', type=float, default=2e-4)
    args = parser.parse_args()
    print(args)
    # Positional pass-through; argument order must match the signature of
    # train_from_folder.
    train_from_folder(args.data, args.results_dir, args.models_dir, args.name, args.new, args.load_from, args.n_part,
                    args.image_size, args.network_capacity, args.batch_size, args.gradient_accumulate_every, args.num_train_steps,
                    args.learning_rate, args.num_workers, args.save_every, args.num_image_tiles)
| 36.580247 | 120 | 0.676679 |
07810d92eed545f822f378b77921b1e6519aac3b | 5,648 | py | Python | test/arwyerContigFilter_server_test.py | arwyer/arwyerContigFilter | f41ba06f6f8ee639e9bdf4d0848ae0d15e40eb13 | [
"MIT"
] | null | null | null | test/arwyerContigFilter_server_test.py | arwyer/arwyerContigFilter | f41ba06f6f8ee639e9bdf4d0848ae0d15e40eb13 | [
"MIT"
] | null | null | null | test/arwyerContigFilter_server_test.py | arwyer/arwyerContigFilter | f41ba06f6f8ee639e9bdf4d0848ae0d15e40eb13 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import unittest
import os # noqa: F401
import json # noqa: F401
import time
import requests
from os import environ
try:
from ConfigParser import ConfigParser # py2
except:
from configparser import ConfigParser # py3
from pprint import pprint # noqa: F401
from biokbase.workspace.client import Workspace as workspaceService
from arwyerContigFilter.arwyerContigFilterImpl import arwyerContigFilter
from arwyerContigFilter.arwyerContigFilterServer import MethodContext
from arwyerContigFilter.authclient import KBaseAuth as _KBaseAuth
from AssemblyUtil.AssemblyUtilClient import AssemblyUtil
class arwyerContigFilterTest(unittest.TestCase):
    """Integration tests for the arwyerContigFilter KBase SDK module.

    Requires a live KBase environment: KB_AUTH_TOKEN, KB_DEPLOYMENT_CONFIG
    and SDK_CALLBACK_URL must be set, and the workspace/auth services must
    be reachable.
    """

    @classmethod
    def setUpClass(cls):
        # Read the deployment config section for this module into cls.cfg.
        token = environ.get('KB_AUTH_TOKEN', None)
        config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('arwyerContigFilter'):
            cls.cfg[nameval[0]] = nameval[1]
        # Getting username from Auth profile for token
        authServiceUrl = cls.cfg['auth-service-url']
        auth_client = _KBaseAuth(authServiceUrl)
        user_id = auth_client.get_user(token)
        # WARNING: don't call any logging methods on the context object,
        # it'll result in a NoneType error
        cls.ctx = MethodContext(None)
        cls.ctx.update({'token': token,
                        'user_id': user_id,
                        'provenance': [
                            {'service': 'arwyerContigFilter',
                             'method': 'please_never_use_it_in_production',
                             'method_params': []
                             }],
                        'authenticated': 1})
        cls.wsURL = cls.cfg['workspace-url']
        cls.wsClient = workspaceService(cls.wsURL)
        cls.serviceImpl = arwyerContigFilter(cls.cfg)
        cls.scratch = cls.cfg['scratch']
        cls.callback_url = os.environ['SDK_CALLBACK_URL']

    @classmethod
    def tearDownClass(cls):
        # Remove the scratch workspace created lazily by getWsName().
        if hasattr(cls, 'wsName'):
            cls.wsClient.delete_workspace({'workspace': cls.wsName})
            print('Test workspace was deleted')

    def getWsClient(self):
        return self.__class__.wsClient

    def getWsName(self):
        # Create the test workspace on first use and cache its name on the
        # class so every test shares the same workspace.
        if hasattr(self.__class__, 'wsName'):
            return self.__class__.wsName
        suffix = int(time.time() * 1000)
        wsName = "test_arwyerContigFilter_" + str(suffix)
        ret = self.getWsClient().create_workspace({'workspace': wsName})  # noqa
        self.__class__.wsName = wsName
        return wsName

    def getImpl(self):
        return self.__class__.serviceImpl

    def getContext(self):
        return self.__class__.ctx

    # NOTE: According to Python unittest naming rules test method names should start from 'test'. # noqa
    def load_fasta_file(self, filename, obj_name, contents):
        # Helper (not a test): write *contents* to *filename* and upload it
        # as a KBase Assembly object named *obj_name*; returns its ref.
        f = open(filename, 'w')
        f.write(contents)
        f.close()
        assemblyUtil = AssemblyUtil(self.callback_url)
        assembly_ref = assemblyUtil.save_assembly_from_fasta({'file': {'path': filename},
                                                              'workspace_name': self.getWsName(),
                                                              'assembly_name': obj_name
                                                              })
        return assembly_ref

    # NOTE: According to Python unittest naming rules test method names should start from 'test'. # noqa
    def test_filter_contigs_ok(self):
        # First load a test FASTA file as an KBase Assembly
        fasta_content = '>seq1 something soemthing asdf\n' \
                        'agcttttcat\n' \
                        '>seq2\n' \
                        'agctt\n' \
                        '>seq3\n' \
                        'agcttttcatgg'
        assembly_ref = self.load_fasta_file(os.path.join(self.scratch, 'test1.fasta'),
                                            'TestAssembly',
                                            fasta_content)
        # Second, call your implementation
        ret = self.getImpl().filter_contigs(self.getContext(),
                                            {'workspace_name': self.getWsName(),
                                             'assembly_input_ref': assembly_ref,
                                             'min_length': 10
                                             })
        # Validate the returned data: only seq2 (length 5) is filtered out.
        self.assertEqual(ret[0]['n_initial_contigs'], 3)
        self.assertEqual(ret[0]['n_contigs_removed'], 1)
        self.assertEqual(ret[0]['n_contigs_remaining'], 2)

    def test_filter_contigs_err1(self):
        # A negative min_length must be rejected before any workspace access.
        with self.assertRaises(ValueError) as errorContext:
            self.getImpl().filter_contigs(self.getContext(),
                                          {'workspace_name': self.getWsName(),
                                           'assembly_input_ref': '1/fake/3',
                                           'min_length': '-10'})
        self.assertIn('min_length parameter cannot be negative', str(errorContext.exception))

    def test_filter_contigs_err2(self):
        # A non-numeric min_length must be rejected with a parse error.
        with self.assertRaises(ValueError) as errorContext:
            self.getImpl().filter_contigs(self.getContext(),
                                          {'workspace_name': self.getWsName(),
                                           'assembly_input_ref': '1/fake/3',
                                           'min_length': 'ten'})
        self.assertIn('Cannot parse integer from min_length parameter', str(errorContext.exception))
c3bca896ee4afef94311e02269ecc4249a133cab | 4,270 | py | Python | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/5-2Nested_false-termination_10.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/5-2Nested_false-termination_10.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/5-2Nested_false-termination_10.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import Tuple, FrozenSet
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
import pysmt.typing as types
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
                                              FNode]:
    """Encode the benchmark program as a symbolic transition system.

    The encoded program is (pc locations 0..2, -1 = terminated):
        while x >= 0:    # pc 0
            x = x + y    # pc 1
            y = y + 1    # pc 2

    Returns (symbols, init, trans, fairness); ``fairness`` is ``pc != -1``,
    i.e. an infinite fair path witnesses non-termination.
    """
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    pc = mgr.Symbol("pc", types.INT)
    x = mgr.Symbol("x", types.INT)
    y = mgr.Symbol("y", types.INT)
    # Primed (next-state) versions of each symbol.
    x_pc = symb_to_next(mgr, pc)
    x_x = symb_to_next(mgr, x)
    x_y = symb_to_next(mgr, y)
    symbols = frozenset([pc, x, y])
    m_1 = mgr.Int(-1)
    n_locs = 3
    max_int = n_locs
    ints = []
    pcs = []
    x_pcs = []
    # Pre-build integer constants and (x_)pc = idx predicates.
    for idx in range(n_locs):
        num = mgr.Int(idx)
        ints.append(num)
        pcs.append(mgr.Equals(pc, num))
        x_pcs.append(mgr.Equals(x_pc, num))
    # NOTE: empty range here (n_locs == max_int); kept from the template.
    for idx in range(n_locs, max_int):
        num = mgr.Int(idx)
        ints.append(num)
    pcend = mgr.Equals(pc, m_1)
    x_pcend = mgr.Equals(x_pc, m_1)
    init = pcs[0]
    cfg = []
    # pc = 0 & (x >= 0) -> pc' = 1
    cond = mgr.GE(x, ints[0])
    cfg.append(mgr.Implies(mgr.And(pcs[0], cond), x_pcs[1]))
    # pc = 0 & !(x >= 0) -> pc' = -1
    cfg.append(mgr.Implies(mgr.And(pcs[0], mgr.Not(cond)), x_pcend))
    # pc = 1 -> pc' = 2
    cfg.append(mgr.Implies(pcs[1], x_pcs[2]))
    # pc = 2 -> pc' = 0
    cfg.append(mgr.Implies(pcs[2], x_pcs[0]))
    # pc = -1 -> pc' = -1
    cfg.append(mgr.Implies(pcend, x_pcend))
    trans = []
    same_x = mgr.Equals(x_x, x)
    same_y = mgr.Equals(x_y, y)
    same = mgr.And(same_x, same_y)
    # pc = 0 -> same
    trans.append(mgr.Implies(pcs[0], same))
    # pc = 1 -> x' = x + y & same_y
    trans.append(mgr.Implies(pcs[1],
                             mgr.And(mgr.Equals(x_x, mgr.Plus(x, y)),
                                     same_y)))
    # pc = 2 -> same_x & y' = y + 1
    trans.append(mgr.Implies(pcs[2],
                             mgr.And(same_x,
                                     mgr.Equals(x_y, mgr.Plus(y, ints[1])))))
    # pc = end -> same
    trans.append(mgr.Implies(pcend, same))
    trans = mgr.And(*cfg, *trans)
    fairness = mgr.Not(mgr.Equals(pc, m_1))
    return symbols, init, trans, fairness
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
    """Return the set of candidate hints for this benchmark instance.

    NOTE(review): this file lives under ``f3_wrong_hints_permutations`` --
    presumably some hints are deliberately unsound to stress the tool;
    do not assume every hint is a correct invariant/ranking aid.
    """
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    pc = mgr.Symbol("pc", types.INT)
    x = mgr.Symbol("x", types.INT)
    y = mgr.Symbol("y", types.INT)
    symbs = frozenset([pc, x, y])
    # Integer constants used in the hint formulas.
    m_100 = mgr.Int(-100)
    m_1 = mgr.Int(-1)
    i_0 = mgr.Int(0)
    i_1 = mgr.Int(1)
    i_2 = mgr.Int(2)
    # NOTE: i_4 and i_20 are unused here; kept from the generator template.
    i_4 = mgr.Int(4)
    i_20 = mgr.Int(20)
    x_pc = symb_to_next(mgr, pc)
    x_x = symb_to_next(mgr, x)
    x_y = symb_to_next(mgr, y)
    res = []
    # h_pc1: trivial single-location hint over pc (always true, self-loop).
    loc0 = Location(env, mgr.TRUE())
    loc0.set_progress(0, mgr.TRUE())
    h_pc = Hint("h_pc1", env, frozenset([pc]), symbs)
    h_pc.set_locs([loc0])
    res.append(h_pc)
    # h_pc0: two-location hint alternating pc = 1 and pc > 2.
    loc0 = Location(env, mgr.Equals(pc, i_1))
    loc0.set_progress(1, mgr.GT(x_pc, mgr.Plus(pc, i_1)))
    loc1 = Location(env, mgr.GT(pc, i_2))
    loc1.set_progress(0, mgr.Equals(x_pc, i_1))
    h_pc = Hint("h_pc0", env, frozenset([pc]), symbs)
    h_pc.set_locs([loc0, loc1])
    res.append(h_pc)
    # h_x2: x alternates between x + y and y, with y >= 1 as side condition.
    loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
    loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(x, y)))
    loc1 = Location(env, mgr.GE(x, i_2), mgr.GE(y, i_1))
    loc1.set_progress(0, mgr.Equals(x_x, y))
    h_x = Hint("h_x2", env, frozenset([x]), symbs)
    h_x.set_locs([loc0, loc1])
    res.append(h_x)
    # h_y6: y is squared, then strictly increased.
    loc0 = Location(env, mgr.GE(y, m_100))
    loc0.set_progress(1, mgr.Equals(x_y, mgr.Times(y, y)))
    loc1 = Location(env, mgr.GE(y, i_0))
    loc1.set_progress(0, mgr.GE(x_y, mgr.Plus(y, i_1)))
    h_y = Hint("h_y6", env, frozenset([y]), symbs)
    h_y.set_locs([loc0, loc1])
    res.append(h_y)
    # h_x5: x oscillates between x^2 and a value below -x^2.
    loc0 = Location(env, mgr.LE(x, i_0))
    loc0.set_progress(1, mgr.Equals(x_x, mgr.Times(x, x)))
    loc1 = Location(env, mgr.GE(x, i_0))
    loc1.set_progress(0, mgr.LT(x_x, mgr.Times(m_1, x, x)))
    h_x = Hint("h_x5", env, frozenset([x]), symbs)
    h_x.set_locs([loc0, loc1])
    res.append(h_x)
    return frozenset(res)
| 28.657718 | 77 | 0.568618 |
50bdda1c624e32b3a3f1d145117b31678b3cf140 | 389 | py | Python | Web Scraping (Beautiful Soup, Scrapy, Selenium)/Web Scraping_ Day68/web scraping python wikipedia/Solution/crawler.py | pooja-gera/TheWireUsChallenge | 18abb5ff3fd31b7dbfef41b8008f91d3fac029d3 | [
"MIT"
] | null | null | null | Web Scraping (Beautiful Soup, Scrapy, Selenium)/Web Scraping_ Day68/web scraping python wikipedia/Solution/crawler.py | pooja-gera/TheWireUsChallenge | 18abb5ff3fd31b7dbfef41b8008f91d3fac029d3 | [
"MIT"
] | null | null | null | Web Scraping (Beautiful Soup, Scrapy, Selenium)/Web Scraping_ Day68/web scraping python wikipedia/Solution/crawler.py | pooja-gera/TheWireUsChallenge | 18abb5ff3fd31b7dbfef41b8008f91d3fac029d3 | [
"MIT"
] | 1 | 2021-05-21T09:30:41.000Z | 2021-05-21T09:30:41.000Z | import bs4
import requests
# Fetch the Wikipedia article and scrape its title plus the intro paragraphs.
response = requests.get(
    "https://en.wikipedia.org/wiki/Python_(programming_language)")
# BUG FIX: requests.get() never returns None -- a failed request is signalled
# by the HTTP status code, so the old "is not None" guard always passed and
# would try to parse error pages. Gate on response.ok (status < 400) instead.
if response.ok:
    page = bs4.BeautifulSoup(response.text, 'html.parser')
    # '#firstHeading' is the article title element on Wikipedia pages.
    title = page.select("#firstHeading")[0].text
    paragraphs = page.select("p")
    print(title)
    # Join the first five paragraphs as the article intro.
    intro = '\n'.join([para.text for para in paragraphs[0:5]])
    print(intro)
| 24.3125 | 86 | 0.681234 |
51ce8aa014b8a559db7b304b859c2b71dbf52fa9 | 1,315 | py | Python | test/test_deprecation.py | marrow/schema | e2b16ec45329a646156388936c2e779ddcd8fa77 | [
"MIT"
] | 3 | 2016-09-03T07:00:50.000Z | 2021-06-19T18:52:56.000Z | test/test_deprecation.py | marrow/schema | e2b16ec45329a646156388936c2e779ddcd8fa77 | [
"MIT"
] | 6 | 2015-01-23T19:32:04.000Z | 2019-10-23T15:36:48.000Z | test/test_deprecation.py | marrow/schema | e2b16ec45329a646156388936c2e779ddcd8fa77 | [
"MIT"
] | 2 | 2015-11-13T20:02:17.000Z | 2018-01-30T12:01:47.000Z | import pytest
import warnings
from marrow.schema.util import DeclarativeAttributes
# (deprecated class, replacement name) pairs: instantiating the class must
# emit a DeprecationWarning that names the replacement.
# BUG FIX: the trailing comma is required -- without it the outer parentheses
# are just grouping, DEPRECATED is a single 2-tuple, and
# pytest.mark.parametrize("cls,dst", DEPRECATED) would try to unpack each
# element (the class and the string) as a separate parameter set.
DEPRECATED = (
    (DeclarativeAttributes, 'Attributes'),
)
@pytest.mark.parametrize("cls,dst", DEPRECATED)
def test_deprecation(cls, dst):
    """Instantiating a deprecated class raises exactly one DeprecationWarning
    whose message names the replacement class *dst*."""
    with warnings.catch_warnings(record=True) as w:
        # 'always' overrides any global filter that would suppress the warning.
        warnings.simplefilter('always')
        cls()
    assert len(w) == 1, "Only one warning should be raised."
    assert issubclass(w[-1].category, DeprecationWarning), "Warning must be a DeprecationWarning."
    assert dst in str(w[-1].message), "Warning should mention correct class to use."
def test_depreciated_validation_import():
    """Importing any of the legacy marrow.schema.validation modules emits a
    single DeprecationWarning pointing at marrow.schema.validate."""
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        # Importing the package triggers the deprecation shim; the submodule
        # imports below must not raise additional warnings.
        import marrow.schema.validation
        import marrow.schema.validation.base
        import marrow.schema.validation.compound
        import marrow.schema.validation.date
        import marrow.schema.validation.geo
        import marrow.schema.validation.network
        import marrow.schema.validation.pattern
        import marrow.schema.validation.testing
        import marrow.schema.validation.util
    assert len(w) == 1, "Only one warning should be raised."
    assert issubclass(w[-1].category, DeprecationWarning), "Warning must be DeprecationWarning."
    assert 'marrow.schema.validate' in str(w[-1].message), "Warning should mention correct module to import."
| 32.073171 | 107 | 0.769582 |
ca8d49f67e1e2ebdf47424a4b4adfbc3e5c258ea | 3,379 | py | Python | var/spack/repos/builtin/packages/r-scater/package.py | xiki-tempula/spack | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 9 | 2018-04-18T07:51:40.000Z | 2021-09-10T03:56:57.000Z | var/spack/repos/builtin/packages/r-scater/package.py | xiki-tempula/spack | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 907 | 2018-04-18T11:17:57.000Z | 2022-03-31T13:20:25.000Z | var/spack/repos/builtin/packages/r-scater/package.py | xiki-tempula/spack | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 29 | 2018-11-05T16:14:23.000Z | 2022-02-03T16:07:09.000Z | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RScater(RPackage):
    """Single-Cell Analysis Toolkit for Gene Expression Data in R.

    A collection of tools for doing various analyses of single-cell RNA-seq
    gene expression data, with a focus on quality control and
    visualization."""

    homepage = "https://bioconductor.org/packages/scater"
    git      = "https://git.bioconductor.org/packages/scater.git"

    # Each version pins the Bioconductor release commit it was built from.
    version('1.12.2', commit='1518dc27a87c79181c34107d639e95fe55e55092')
    version('1.10.1', commit='2e6694af2929092f263c2b0830d48b3f9632e70c')
    version('1.8.4', commit='d560a9a378541d53d17990d2aa2cd28874df3dcd')
    version('1.6.3', commit='964effb4e883102d7c8cae627dbac4ba5d216a75')
    version('1.4.0', commit='90a2eab66ff82ba6dd7fbb33e41cd0ded20fa218')

    # Dependency ranges mirror each release's upstream DESCRIPTION file;
    # tight when= windows mark packages that were later dropped or replaced.
    depends_on('r@3.3:', when='@1.4.0', type=('build', 'run'))
    depends_on('r-biobase', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-ggplot2', type=('build', 'run'))
    depends_on('r-biomart', when='@1.4.0:1.6.3', type=('build', 'run'))
    depends_on('r-biocgenerics', type=('build', 'run'))
    depends_on('r-data-table', when='@1.4.0:1.6.3', type=('build', 'run'))
    depends_on('r-dplyr', when='@1.4.0:1.12.2', type=('build', 'run'))
    depends_on('r-edger', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-ggbeeswarm', type=('build', 'run'))
    depends_on('r-limma', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-matrix', type=('build', 'run'))
    depends_on('r-matrixstats', when='@1.4.0:1.6.3', type=('build', 'run'))
    depends_on('r-plyr', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-reshape2', when='@1.4.0:1.10.1', type=('build', 'run'))
    depends_on('r-rhdf5', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-rjson', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-shiny', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-shinydashboard', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-tximport', when='@1.4.0:1.8.4', type=('build', 'run'))
    depends_on('r-viridis', type=('build', 'run'))
    depends_on('r@3.4:', when='@1.6.3', type=('build', 'run'))
    depends_on('r-singlecellexperiment', when='@1.6.3:', type=('build', 'run'))
    depends_on('r-summarizedexperiment', when='@1.6.3:', type=('build', 'run'))
    depends_on('r-s4vectors', when='@1.6.3:', type=('build', 'run'))
    depends_on('r-rcpp', when='@1.6.3:', type=('build', 'run'))
    depends_on('r-rhdf5lib', when='@1.6.3:1.10.1', type=('build', 'run'))
    depends_on('r-beachmat', when='@1.6.3:', type=('build', 'run'))
    depends_on('r@3.5:', when='@1.8.4', type=('build', 'run'))
    depends_on('r-delayedmatrixstats', when='@1.8.4:', type=('build', 'run'))
    depends_on('r-rcpp@0.12.14:', when='@1.8.4:', type=('build', 'run'))
    depends_on('r-delayedarray', when='@1.8.4:', type=('build', 'run'))
    depends_on('r-biocparallel', when='@1.10.1:', type=('build', 'run'))
    depends_on('r@3.6:', when='@1.12.2', type=('build', 'run'))
    depends_on('r-biocneighbors', when='@1.12.2:', type=('build', 'run'))
    depends_on('r-biocsingular', when='@1.12.2:', type=('build', 'run'))
| 52.796875 | 79 | 0.616751 |
da697f6ce064a840143e9716a71370705cdd7c41 | 10,249 | py | Python | AW_hotkeyEditor.py | boredstiff/MotionBuilderHotkeyEditor | e377103ac194a1da2680006881de25442ae9d6e6 | [
"Apache-2.0"
] | 4 | 2019-01-30T10:41:07.000Z | 2020-07-09T05:45:31.000Z | AW_hotkeyEditor.py | OlafHaag/MotionBuilderHotkeyEditor | 7198dc0bb7cf0c20b505be10fd6c7810bd13e66d | [
"Apache-2.0"
] | null | null | null | AW_hotkeyEditor.py | OlafHaag/MotionBuilderHotkeyEditor | 7198dc0bb7cf0c20b505be10fd6c7810bd13e66d | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Alex Widener
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
What this is:
A Hotkey Editor for Autodesk MotionBuilder.
See documentation here:
http://alexwidener.github.io/MotionBuilderHotkeyEditor/
Thanks:
Yi Liang Siew for sorting out my stupidity. http://www.sonictk.com/
modified 2018 Olaf Haag:
- added Apache License 2.0 boilerplate notice
- changed code to find config folders
"""
__author__ = "Alex Widener"
__copyright__ = "Alex Widener"
__credits__ = ["Yi Liang Siew"]
__email__ = "alexwidener # gmail"
import os
import time
from shutil import copy
from PySide import QtGui, QtCore
import webbrowser
from pyfbsdk import FBSystem
# Directory shipping MotionBuilder's factory keyboard presets (read-only).
DEFAULTCONFIGSPATH = FBSystem().ConfigPath + 'Keyboard'
# Per-user keyboard config directory -- the editable hotkey copy lives here.
KEYBOARDCONFIGPATH = FBSystem().UserConfigPath + '/Keyboard'
# Per-user scripts directory holding ActionScript.txt (script bindings).
ACTIONSCRIPTPATH = FBSystem().UserConfigPath + '/Scripts'

# File names of the factory hotkey preset files under DEFAULTCONFIGSPATH.
MAX = '3ds Max.txt'
LIGHTWAVE = 'Lightwave.txt'
MAYA = 'Maya.txt'
MOTIONBUILDERCLASSIC = 'MotionBuilder Classic.txt'
MOTIONBUILDER = 'MotionBuilder.txt'
SOFTIMAGE = 'Softimage.txt'
# Name of the user's editable hotkey file created by this tool.
CUSTOMHOTKEYS = 'customHotkeys.txt'

# MotionBuilder's main window, used as the Qt parent for the editor.
parent = QtGui.QApplication.activeWindow()
class HotkeyEditor(object):
    """Module-level registry holding references to open editor widgets so
    they are not garbage-collected while shown."""
    # Shared, class-level list; UI_HotkeyEditor appends itself on creation.
    widgets = []
class UI_HotkeyEditor(QtGui.QWidget):
    """Main editor window.

    Shows two side-by-side tables -- hotkey bindings and script-path
    bindings -- backed by the user's customHotkeys.txt and ActionScript.txt
    files, with menus to reset to any factory preset.
    """

    def __init__(self, parent):
        super(UI_HotkeyEditor, self).__init__()
        self.setObjectName('HotkeyEditor')
        self.setWindowTitle('AW_Hotkey Editor')
        self.resize(1280, 720)
        # Factory layout used to seed the editable copy on first run.
        self.mbDefaultFile = os.path.join(DEFAULTCONFIGSPATH, 'MotionBuilder.txt')
        self.customKeysFile = os.path.join(KEYBOARDCONFIGPATH, CUSTOMHOTKEYS)
        self.actionScriptFile = os.path.join(ACTIONSCRIPTPATH, 'ActionScript.txt')
        self.customSettings = []
        # First run: create the editable files from the shipped defaults.
        if not os.path.isfile(self.customKeysFile):
            copy(self.mbDefaultFile, self.customKeysFile)
        if not os.path.isfile(self.actionScriptFile):
            self.createActionScriptFile()
        self._layouts()
        self._connections()
        HotkeyEditor.widgets.append(self)

    def _getCustomSettings(self):
        """Return every line of the custom hotkey file, stripped of
        surrounding whitespace and embedded tabs."""
        self.customSettings = []
        return [self.replacer(line) for line in open(self.customKeysFile)]

    def _layouts(self):
        """Build the menu bar, path display, both tables and the buttons."""
        self.main_layout = QtGui.QVBoxLayout()
        self.menu_bar = QtGui.QMenuBar()
        self.main_layout.addWidget(self.menu_bar)
        self.file_menu = self.menu_bar.addMenu('File')
        self.help_menu = self.menu_bar.addMenu('Help')
        self.doc_menu = self.help_menu.addAction('Documentation')
        # One reset action per factory preset.
        self.reset_menu = self.file_menu.addMenu('Reset Hotkeys')
        self.mb_defaults_menu = self.reset_menu.addAction('MotionBuilder (default)')
        self.mbc_defaults_menu = self.reset_menu.addAction('MotionBuilder Classic')
        self.lightwave_menu = self.reset_menu.addAction('Lightwave')
        self.max_menu = self.reset_menu.addAction('3DS Max')
        self.maya_menu = self.reset_menu.addAction('Maya')
        self.softimage_menu = self.reset_menu.addAction('SoftImage')
        self.path_layout = QtGui.QHBoxLayout()
        self.customPathQLine = QtGui.QLineEdit()
        self.customPathQLine.setText(self.customKeysFile.replace('/', '\\'))
        self.customPathQLine.setEnabled(False)  # read-only path display
        self.path_layout.addWidget(self.customPathQLine)
        self.main_layout.addLayout(self.path_layout)
        self.settingsLayout = QtGui.QHBoxLayout()
        self.settingsList = QtGui.QTableWidget()
        self.settingsList.setColumnCount(2)
        self.settingsList.setHorizontalHeaderLabels(['Action', 'Key Combination'])
        self.scriptsList = QtGui.QTableWidget()
        self.scriptsList.setColumnCount(2)
        self.scriptsList.setHorizontalHeaderLabels(['Script', 'Path to Script'])
        self.settingsLayout.addWidget(self.settingsList)
        self.settingsLayout.addWidget(self.scriptsList)
        self._fillTables()
        self.main_layout.addLayout(self.settingsLayout)
        self.button_layout = QtGui.QHBoxLayout()
        self.submit_button = QtGui.QPushButton('Save Changes')
        self.button_layout.addStretch(1)
        self.button_layout.addWidget(self.submit_button)
        self.main_layout.addLayout(self.button_layout)
        self.setLayout(self.main_layout)

    def _fillTables(self):
        """Populate both tables from the custom hotkey / action-script files."""
        allRows = self.settingsList.rowCount()
        for row in range(0, allRows):
            self.settingsList.removeRow(row)
        settingsRow = 0
        scriptsRow = 0
        settings = self._getCustomSettings()
        for line in settings:
            newRow = self.settingsList.insertRow(settingsRow)
            # Hotkey lines have the form "action = key combination".
            actionItem = QtGui.QTableWidgetItem(line.partition('=')[0])
            hotkeyItem = QtGui.QTableWidgetItem(line.partition('=')[2])
            self.settingsList.setItem(settingsRow, 0, actionItem)
            self.settingsList.setItem(settingsRow, 1, hotkeyItem)
            # Script actions additionally get a row in the scripts table,
            # resolved to their path via ActionScript.txt.
            if 'action.global.script' in line.partition('=')[0]:
                agScript = line.partition('=')[0]
                num = agScript.partition('action.global.script')[2]
                scriptItem = QtGui.QTableWidgetItem('Script%s' % num)
                scriptPath = self.getPathFromFile(self.actionScriptFile, num)
                pathItem = QtGui.QTableWidgetItem(scriptPath)
                self.scriptsList.insertRow(scriptsRow)
                self.scriptsList.setItem(scriptsRow, 0, scriptItem)
                self.scriptsList.setItem(scriptsRow, 1, pathItem)
                scriptsRow += 1
            settingsRow += 1
        self.settingsList.resizeColumnsToContents()
        self.scriptsList.resizeColumnsToContents()

    def _connections(self):
        """Wire menu actions and the save button to their handlers."""
        self.submit_button.clicked.connect(lambda: self.saveSettings())
        self.mb_defaults_menu.triggered.connect(lambda: self._replaceCustomFile(MOTIONBUILDER))
        self.mbc_defaults_menu.triggered.connect(lambda: self._replaceCustomFile(MOTIONBUILDERCLASSIC))
        self.lightwave_menu.triggered.connect(lambda: self._replaceCustomFile(LIGHTWAVE))
        self.max_menu.triggered.connect(lambda: self._replaceCustomFile(MAX))
        self.maya_menu.triggered.connect(lambda: self._replaceCustomFile(MAYA))
        self.softimage_menu.triggered.connect(lambda: self._replaceCustomFile(SOFTIMAGE))
        self.doc_menu.triggered.connect(lambda: self.openWebsite())

    def replacer(self, element):
        # Strip surrounding whitespace and embedded tabs from a config line.
        return element.strip().replace('\t', '')

    def createActionScriptFile(self):
        """
        Creates the user ActionScript file which will store the user's file paths to their scripts.
        If you want to increase it, increase the number from 13.
        """
        # NOTE(review): range(1, 13) writes Script1..Script12 (12 slots),
        # not 13 as the docstring suggests.
        with open(self.actionScriptFile, 'w') as f:
            f.write('[ScriptFiles]\r\n')
            for i in range(1, 13):
                f.write('Script%d = \r\n' % i)

    def getPathFromFile(self, acFilePath, num):
        """Return the script path bound to slot *num* in the action-script
        file, or None when the slot is missing."""
        with open(acFilePath, 'r') as f:
            scriptcontents = f.readlines()
        for line in scriptcontents:
            if line.startswith('Script%s' % num):
                return line.partition(' = ')[2]

    def saveSettings(self):
        """Write both tables back to disk (rewriting the two config files),
        then re-launch the editor via main()."""
        settingsRows = self.settingsList.rowCount()
        scriptsRows = self.scriptsList.rowCount()
        if os.path.isfile(self.customKeysFile):
            os.remove(self.customKeysFile)
        with open(self.customKeysFile, 'w') as f:
            for x in range(0, settingsRows):
                actionItem = self.settingsList.item(x, 0).text()
                keyItem = self.settingsList.item(x, 1).text()
                # Section headers ("[...]") are written verbatim; bound and
                # unbound actions get "action = key" / "action = " lines.
                if '[' in actionItem and keyItem == '':
                    f.write(actionItem.strip() + '\r\n')
                elif actionItem != '' and keyItem != '':
                    f.write(actionItem.strip() + ' = ' + keyItem.strip() + '\r\n')
                elif actionItem != '' and keyItem == '':
                    f.write(actionItem.strip() + ' = \r\n')
                else:
                    f.write('\n')
        if os.path.isfile(self.actionScriptFile):
            os.remove(self.actionScriptFile)
        with open(self.actionScriptFile, 'w') as f:
            f.write('[ScriptFiles]\n')
            for y in range(0, scriptsRows):
                scriptItem = self.scriptsList.item(y, 0).text()
                pathItem = self.scriptsList.item(y, 1).text()
                f.write('{0} = {1} \r\n'.format(scriptItem.strip(), pathItem.rstrip()))
        # Reopen the editor so the freshly written files are re-read.
        main()

    def _replaceCustomFile(self, replacement):
        """
        The replacement is whichever system the user chose to replace with.
        It will overwrite the user's current settings.
        But this allows the user to start their current settings from whatever system they want to start from.
        """
        copyFile = os.path.join(DEFAULTCONFIGSPATH, replacement)
        newFile = os.path.join(KEYBOARDCONFIGPATH, CUSTOMHOTKEYS)
        copy(copyFile, newFile)
        # Clear both tables completely before repopulating from the preset.
        self.settingsList.clearContents()
        self.scriptsList.clearContents()
        while self.settingsList.rowCount() > 0:
            self.settingsList.removeRow(0)
        while self.scriptsList.rowCount() > 0:
            self.scriptsList.removeRow(0)
        self._fillTables()
        self.settingsList.resizeColumnsToContents()
        self.scriptsList.resizeColumnsToContents()

    def openWebsite(self):
        """Open the tool's documentation page in the default browser."""
        URL = 'http://alexwidener.github.io/MotionBuilderHotkeyEditor/'
        webbrowser.open_new_tab(URL)
def main():
    """Close any HotkeyEditor window left open from a previous run so only
    a single editor instance is ever shown."""
    app = QtGui.QApplication.instance()
    for widget in app.topLevelWidgets():
        if widget.objectName() == 'HotkeyEditor':
            widget.close()
# start working in the try/Except at some point
# Close any stale editor window, then create and show a fresh one. The
# module-level reference keeps the widget alive after this script returns.
main()
HKE = UI_HotkeyEditor(parent)
HKE.show()
4e2cff4603f6bf5faaf96ace9167a3a18949db67 | 15,983 | py | Python | adwords_python3_examples_10.1.0/v201708/migration/migrate_to_extension_settings.py | xyla-io/hazel | 260ce906761d8b808c21ca61b44cc71ca3329e8c | [
"MIT"
] | null | null | null | adwords_python3_examples_10.1.0/v201708/migration/migrate_to_extension_settings.py | xyla-io/hazel | 260ce906761d8b808c21ca61b44cc71ca3329e8c | [
"MIT"
] | null | null | null | adwords_python3_examples_10.1.0/v201708/migration/migrate_to_extension_settings.py | xyla-io/hazel | 260ce906761d8b808c21ca61b44cc71ca3329e8c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Migrates Feed-based sitelinks at Campaign level to use extension settings.
To learn more about extensionsettings, see:
https://developers.google.com/adwords/api/docs/guides/extension-settings.
To learn more about migrating Feed-based extensions to extension settings, see:
https://developers.google.com/adwords/api/docs/guides/migrate-to-extension-settings
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from googleads import adwords
# The placeholder type for sitelinks. For the list of all supported placeholder
# types, see:
# https://developers.google.com/adwords/api/docs/appendix/placeholders
PLACEHOLDER_TYPE_SITELINKS = 1

# The placeholder field IDs for sitelinks. For the list of all supported
# placeholder types, see:
# https://developers.google.com/adwords/api/docs/appendix/placeholders
SITE_LINK_FIELDS = {
    'TEXT': 1,
    'URL': 2,
    'LINE2': 3,
    'LINE3': 4,
    'FINAL_URLS': 5,
    'FINAL_MOBILE_URLS': 6,
    'TRACKING_URL_TEMPLATE': 7
}

# Number of results requested per page when iterating paged selectors.
PAGE_SIZE = 500
def CreateExtensionSetting(client, feed_items, campaign_feed, feed_item_ids,
                           platform_restrictions=None):
    """Creates the extension setting for a list of Feed Items.

    Args:
        client: an AdWordsClient instance.
        feed_items: the list of all Feed Items.
        campaign_feed: the original Campaign Feed.
        feed_item_ids: the Ids of the feed items for which extension settings
            should be created.
        platform_restrictions: an optional Platform Restriction for the Feed
            items.
    """
    campaign_extension_setting_service = client.GetService(
        'CampaignExtensionSettingService', 'v201708')

    # BUG FIX: each sitelink dict was previously wrapped in a set literal
    # ({...}), which raises "TypeError: unhashable type: 'dict'" at runtime.
    # ExtensionSetting.extensions expects a plain list of ExtensionFeedItems.
    extension_feed_items = [
        CreateSitelinkFeedItem(feed_items, feed_item_id)
        for feed_item_id in feed_item_ids]

    extension_setting = {
        'extensions': extension_feed_items
    }

    if platform_restrictions:
        extension_setting['platformRestrictions'] = platform_restrictions

    campaign_extension_setting = {
        'campaignId': campaign_feed['campaignId'],
        'extensionType': 'SITELINK',
        'extensionSetting': extension_setting
    }

    operation = {
        'operand': campaign_extension_setting,
        'operator': 'ADD'
    }

    campaign_extension_setting_service.mutate([operation])
def CreateSitelinkFeedItem(feed_items, feed_item_id):
    """Builds a SitelinkFeedItem dict from the raw sitelink feed data.

    Args:
        feed_items: mapping of feed item Id -> sitelink data extracted from
            a Feed (see GetSitelinksFromFeed).
        feed_item_id: the Id of the sitelink to convert.

    Returns:
        The new Sitelink Feed Item.
    """
    source = feed_items[feed_item_id]

    sitelink = {
        'sitelinkText': source['text'],
        'sitelinkLine2': source['line2'],
        'sitelinkLine3': source['line3'],
        'scheduling': source['scheduling'],
    }

    # Upgraded (final) URLs take precedence over the legacy destination URL.
    if source.get('finalUrls'):
        sitelink['sitelinkFinalUrls'] = {'urls': source['finalUrls']}
        if 'finalMobileUrls' in source:
            sitelink['sitelinkFinalMobileUrls'] = {
                'urls': source['finalMobileUrls']}
            sitelink['sitelinkTrackingUrlTemplate'] = (
                source['trackingUrlTemplate'])
    else:
        sitelink['sitelinkUrl'] = source['url']

    return sitelink
def DeleteCampaignFeed(client, campaign_feed):
    """Removes the given CampaignFeed.

    Args:
        client: an AdWordsClient instance.
        campaign_feed: the campaign feed to delete.
    """
    service = client.GetService('CampaignFeedService', 'v201708')
    service.mutate([{'operand': campaign_feed, 'operator': 'REMOVE'}])
def DeleteOldFeedItems(client, feed_item_ids, feed):
    """Deletes the migrated feed items from *feed*.

    A no-op when feed_item_ids is empty.

    Args:
        client: an AdWordsClient instance.
        feed_item_ids: a list of Feed Item Ids.
        feed: the Feed containing the given Feed Item Ids.
    """
    if not feed_item_ids:
        return

    operations = []
    for item_id in feed_item_ids:
        operations.append({
            'operator': 'REMOVE',
            'operand': {'feedId': feed['id'], 'feedItemId': item_id},
        })

    client.GetService('FeedItemService', 'v201708').mutate(operations)
def GetCampaignFeeds(client, feed, placeholder_type):
    """Get a list of Feed Item Ids used by a campaign via a given Campaign Feed.

    Args:
        client: an AdWordsClient instance.
        feed: a Campaign Feed.
        placeholder_type: the Placeholder Type.

    Returns:
        A list of Feed Item Ids.
    """
    campaign_feed_service = client.GetService('CampaignFeedService', 'v201708')

    campaign_feeds = []
    more_pages = True

    # Only enabled campaign feeds for this Feed that serve the requested
    # placeholder type are selected.
    selector = {
        'fields': ['CampaignId', 'MatchingFunction', 'PlaceholderTypes'],
        'predicates': [
            {
                'field': 'Status',
                'operator': 'EQUALS',
                'values': ['ENABLED']
            },
            {
                'field': 'FeedId',
                'operator': 'EQUALS',
                'values': [feed['id']]
            },
            {
                'field': 'PlaceholderTypes',
                'operator': 'CONTAINS_ANY',
                'values': [placeholder_type]
            }
        ],
        'paging': {
            'startIndex': 0,
            'numberResults': PAGE_SIZE
        }
    }

    # Page through the results PAGE_SIZE entries at a time.
    while more_pages:
        page = campaign_feed_service.get(selector)
        if 'entries' in page:
            campaign_feeds.extend(page['entries'])
        selector['paging']['startIndex'] += PAGE_SIZE
        more_pages = selector['paging']['startIndex'] < int(page['totalNumEntries'])

    return campaign_feeds
def GetFeeds(client):
    """Returns a list of all enabled Feeds.

    Args:
        client: an AdWordsClient instance.

    Returns:
        A list containing all enabled Feeds.
    """
    feed_service = client.GetService('FeedService', 'v201708')

    feeds = []
    more_pages = True

    # Only user-created (USER origin), enabled feeds are candidates for
    # migration.
    selector = {
        'fields': ['Id', 'Name', 'Attributes'],
        'predicates': [
            {
                'field': 'Origin',
                'operator': 'EQUALS',
                'values': ['USER']
            },
            {
                'field': 'FeedStatus',
                'operator': 'EQUALS',
                'values': ['ENABLED']
            }
        ],
        'paging': {
            'startIndex': 0,
            'numberResults': PAGE_SIZE
        }
    }

    # Page through the results PAGE_SIZE entries at a time.
    while more_pages:
        page = feed_service.get(selector)
        if 'entries' in page:
            feeds.extend(page['entries'])
        selector['paging']['startIndex'] += PAGE_SIZE
        more_pages = selector['paging']['startIndex'] < int(page['totalNumEntries'])

    return feeds
def GetFeedItems(client, feed):
    """Returns the Feed Items for a given Feed.

    Args:
        client: an AdWordsClient instance.
        feed: the Feed we are retrieving Feed Items from.

    Returns:
        The Feed Items associated with the given Feed.
    """
    feed_item_service = client.GetService('FeedItemService', 'v201708')

    feed_items = []
    more_pages = True

    # Only enabled items belonging to this feed are returned.
    selector = {
        'fields': ['FeedItemId', 'AttributeValues', 'Scheduling'],
        'predicates': [
            {
                'field': 'Status',
                'operator': 'EQUALS',
                'values': ['ENABLED']
            },
            {
                'field': 'FeedId',
                'operator': 'EQUALS',
                'values': [feed['id']]
            }
        ],
        'paging': {
            'startIndex': 0,
            'numberResults': PAGE_SIZE
        }
    }

    # Page through the results PAGE_SIZE entries at a time.
    while more_pages:
        page = feed_item_service.get(selector)
        if 'entries' in page:
            feed_items.extend(page['entries'])
        selector['paging']['startIndex'] += PAGE_SIZE
        more_pages = selector['paging']['startIndex'] < int(page['totalNumEntries'])

    return feed_items
def GetFeedItemIdsForCampaign(campaign_feed):
    """Extracts the Feed Item Ids referenced by a Campaign Feed.

    Only matching functions of the form IN(FEED_ITEM_ID, {id, id, ...}) are
    recognised; anything else yields an empty set.

    Args:
        campaign_feed: the Campaign Feed we are retrieving Feed Item Ids from.

    Returns:
        A set of Feed Item IDs.
    """
    ids = set()
    matching_fn = campaign_feed.get('matchingFunction', {})
    lhs = matching_fn.get('lhsOperand')

    if not lhs:
        return ids

    context_operand = lhs[0]
    if context_operand['FunctionArgumentOperand.Type'] != 'RequestContextOperand':
        return ids

    if (context_operand['contextType'] == 'FEED_ITEM_ID'
            and matching_fn['operator'] == 'IN'):
        # Only constant operands carry literal feed item Ids.
        for rhs_arg in matching_fn['rhsOperand']:
            if rhs_arg['xsi_type'] == 'ConstantOperand':
                ids.add(rhs_arg['longValue'])

    return ids
def GetFeedMapping(client, feed, placeholder_type):
    """Gets the Feed Mapping for a given Feed.

    Args:
        client: an AdWordsClient instance.
        feed: the Feed we are retrieving the Feed Mapping for.
        placeholder_type: the Placeholder Type we are looking for.

    Returns:
        A dictionary containing the Feed Mapping: feed attribute Id -> list
        of placeholder field Ids it maps to.
    """
    feed_mapping_service = client.GetService('FeedMappingService', 'v201708')

    attribute_mappings = {}
    more_pages = True

    selector = {
        'fields': ['FeedMappingId', 'AttributeFieldMappings'],
        'predicates': [
            {
                'field': 'FeedId',
                'operator': 'EQUALS',
                'values': [feed['id']]
            },
            {
                'field': 'PlaceholderType',
                'operator': 'EQUALS',
                'values': [placeholder_type]
            }
        ],
        'paging': {
            'startIndex': 0,
            'numberResults': PAGE_SIZE
        }
    }

    # Page through the results PAGE_SIZE entries at a time.
    while more_pages:
        page = feed_mapping_service.get(selector)
        if 'entries' in page:
            # Normally, a feed attribute is mapped only to one field. However, you may
            # map it to more than one field if needed.
            for feed_mapping in page['entries']:
                for attribute_mapping in feed_mapping['attributeFieldMappings']:
                    # Since attribute mappings can have multiple values for each key,
                    # we use a list to store the values.
                    if attribute_mapping['feedAttributeId'] in attribute_mappings:
                        attribute_mappings[attribute_mapping['feedAttributeId']].append(
                            attribute_mapping['fieldId'])
                    else:
                        attribute_mappings[attribute_mapping['feedAttributeId']] = [
                            attribute_mapping['fieldId']]
        selector['paging']['startIndex'] += PAGE_SIZE
        more_pages = selector['paging']['startIndex'] < int(page['totalNumEntries'])

    return attribute_mappings
def GetPlatformRestrictions(campaign_feed):
    """Derives the platform restriction encoded in a Campaign Feed.

    Args:
        campaign_feed: the Campaign Feed we are retrieving Platform
            Restrictions for.

    Returns:
        The upper-cased platform restriction (e.g. 'MOBILE'), or None when
        the matching function carries no device restriction.
    """
    matching_fn = campaign_feed['matchingFunction']
    restriction = None

    if matching_fn['operator'] == 'AND':
        for operand in matching_fn['lhsOperand']:
            inner = operand['value']
            # Looking for an EQUALS(CONTEXT.DEVICE, '<platform>') sub-function.
            if inner['operator'] != 'EQUALS':
                continue
            context_operand = inner['lhsOperand'][0]
            if context_operand == 'DEVICE_PLATFORM':
                # ExtensionSettingPlatform values are upper-case.
                restriction = inner['rhsOperand'][0].upper()

    return restriction
def GetSitelinksFromFeed(client, feed):
    """Gets the sitelinks from a feed.

    Args:
        client: an AdWordsClient instance.
        feed: the feed used to retrieve sitelinks.

    Returns:
        A dictionary mapping the feed item ID to SiteLinkFromFeed.
    """
    # Retrieve the feed's attribute mapping (attribute Id -> placeholder
    # field Ids) so raw attribute values can be interpreted.
    feed_mappings = GetFeedMapping(client, feed, PLACEHOLDER_TYPE_SITELINKS)

    feed_items = {}

    for feed_item in GetFeedItems(client, feed):
        site_link_from_feed = {}
        for attribute_value in feed_item['attributeValues']:
            if attribute_value['feedAttributeId'] in feed_mappings:
                # One attribute may map to several placeholder fields.
                for field_id in feed_mappings[attribute_value['feedAttributeId']]:
                    if field_id == SITE_LINK_FIELDS['TEXT']:
                        site_link_from_feed['text'] = attribute_value['stringValue']
                    elif field_id == SITE_LINK_FIELDS['URL']:
                        site_link_from_feed['url'] = attribute_value['stringValue']
                    elif field_id == SITE_LINK_FIELDS['FINAL_URLS']:
                        site_link_from_feed['finalUrls'] = attribute_value['stringValues']
                    elif field_id == SITE_LINK_FIELDS['FINAL_MOBILE_URLS']:
                        site_link_from_feed['finalMobileUrls'] = attribute_value[
                            'stringValues']
                    elif field_id == SITE_LINK_FIELDS['TRACKING_URL_TEMPLATE']:
                        site_link_from_feed['trackingUrlTemplate'] = attribute_value[
                            'stringValue']
                    elif field_id == SITE_LINK_FIELDS['LINE2']:
                        site_link_from_feed['line2'] = attribute_value['stringValue']
                    elif field_id == SITE_LINK_FIELDS['LINE3']:
                        site_link_from_feed['line3'] = attribute_value['stringValue']
                    else:
                        print('No applicable Site Link Field found for Id: %s' % field_id)
        # Scheduling is optional on feed items; copy it only when present.
        if 'scheduling' in feed_item:
            site_link_from_feed['scheduling'] = feed_item['scheduling']
        feed_items[feed_item['feedItemId']] = site_link_from_feed

    return feed_items
def main(client):
    """Migrates every sitelink Campaign Feed to campaign extension settings.

    For each user feed: read its sitelinks, find the campaign feeds using
    them, replace each with an equivalent extension setting, then delete the
    now-unused feed items.

    Args:
        client: an AdWordsClient instance.
    """
    # Get all of the feeds for the current user.
    feeds = GetFeeds(client)

    for feed in feeds:
        # Retrieve all the sitelinks from the current feed.
        feed_items = GetSitelinksFromFeed(client, feed)
        # Get all the instances where a sitelink from this feed has been added
        # to a campaign.
        campaign_feeds = GetCampaignFeeds(client, feed,
                                          PLACEHOLDER_TYPE_SITELINKS)

        all_feed_items_to_delete = []

        for campaign_feed in campaign_feeds:
            # Retrieve the sitelinks that have been associated with this
            # Campaign.
            feed_item_ids = GetFeedItemIdsForCampaign(campaign_feed)

            # BUG FIX: feed_item_ids is a set, so the old "== 0" test was
            # never true -- campaign feeds with no mapped item Ids were
            # migrated anyway and the skip message was dead code. The message
            # also indexed non-existent keys ('campaign_id'/'feed_id'); the
            # selected field is 'campaignId' and the feed Id comes from feed.
            if not feed_item_ids:
                print(('Migration skipped for campaign feed with campaign ID %d '
                       'and feed ID %d because no mapped feed item IDs were found in '
                       'the campaign feed\'s matching function.'
                       % (campaign_feed['campaignId'], feed['id'])))
                continue

            platform_restrictions = GetPlatformRestrictions(campaign_feed)

            # Delete the campaign feed that associates the sitelinks from the
            # feed to the Campaign.
            DeleteCampaignFeed(client, campaign_feed)

            # Create extension settings instead of sitelinks.
            CreateExtensionSetting(client, feed_items, campaign_feed,
                                   feed_item_ids, platform_restrictions)

            # Mark the sitelinks from the feed for deletion.
            all_feed_items_to_delete.extend(feed_item_ids)

        # Delete all the sitelinks from the feed.
        DeleteOldFeedItems(client, all_feed_items_to_delete, feed)
if __name__ == '__main__':
    # Initialize client object. Credentials come from googleads.yaml in the
    # user's home directory (see module docstring).
    adwords_client = adwords.AdWordsClient.LoadFromStorage()

    main(adwords_client)
| 30.270833 | 83 | 0.666208 |
8799d962e142ec22b6af858c6b82565cc104c0b5 | 785 | py | Python | 0x0F-python-object_relational_mapping/9-model_state_filter_a.py | Nahi-Terefe/alx-higher_level_programming | c67a78a6f79e853918963971f8352979e7691541 | [
"MIT"
] | null | null | null | 0x0F-python-object_relational_mapping/9-model_state_filter_a.py | Nahi-Terefe/alx-higher_level_programming | c67a78a6f79e853918963971f8352979e7691541 | [
"MIT"
] | null | null | null | 0x0F-python-object_relational_mapping/9-model_state_filter_a.py | Nahi-Terefe/alx-higher_level_programming | c67a78a6f79e853918963971f8352979e7691541 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
"""filter all the states that contains the a letter"""
from model_state import Base, State
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import sys
if __name__ == '__main__':
    # Credentials and target database are supplied on the command line:
    # ./script.py <mysql username> <mysql password> <database name>
    user, pwd, db_name = sys.argv[1], sys.argv[2], sys.argv[3]

    url = ('mysql+mysqldb://{}:{}@localhost:3306/{}'
           .format(user, pwd, db_name))
    engine = create_engine(url, pool_pre_ping=True)
    Base.metadata.create_all(engine)

    session = sessionmaker(bind=engine)()

    # Walk every state in id order, printing only names containing an 'a';
    # the printed number is the 1-based position in the full result set.
    for position, state in enumerate(session.query(State).order_by(State.id), 1):
        if 'a' in state.name:
            print("{}: {}".format(position, state.name))

    session.close()
| 31.4 | 68 | 0.634395 |
68cd6a5ba24c7ca07f40e90158fcb72830f1b248 | 1,746 | py | Python | source/16/mc-16-02-02-esmajor-1white.py | schef/schef.github.io | ac6fc70e5077deeeb8233ede89e0895fdc2a0d05 | [
"MIT"
] | null | null | null | source/16/mc-16-02-02-esmajor-1white.py | schef/schef.github.io | ac6fc70e5077deeeb8233ede89e0895fdc2a0d05 | [
"MIT"
] | null | null | null | source/16/mc-16-02-02-esmajor-1white.py | schef/schef.github.io | ac6fc70e5077deeeb8233ede89e0895fdc2a0d05 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
#import time  # NOTE(review): `time` is used below (time.sleep); presumably
# re-exported by `from ppFunctions import *` -- confirm before removing.
import random
import imp
# Load shared helpers (note playback, tone tables) and the ASCII logo from
# the sibling ../00 directory.  `modl`/`logo` handles are unused afterwards;
# the star-imports below are what bring the names into scope.
modl = imp.load_source('ppFunctions', '../00/ppFunctions.py')
logo = imp.load_source('logo', '../00/logo.py')
import os
from ppFunctions import *
from termcolor import colored, cprint
os.system('clear')
from logo import *
# Sleep briefly because the MIDI modules need a moment to initialize.
print_logo()
time.sleep(1)
# Colored one-line printers for the status bar and the help line.
print_status = lambda x: cprint(x, 'white', 'on_blue')
print_help = lambda x: cprint(x, 'red')
hit = 0          # consecutive correct answers; reset on a wrong guess
rounde = 1       # round counter, advances only on a correct answer
done = False
# Candidate pool: every white-key MIDI note between c and h (inclusive).
generatedList = []
for i in range(stringToMidiNum("c"), stringToMidiNum("h")+1):
    if i%12 in whiteTonesBase:
        generatedList.append(i)
# Main quiz loop: play the Es-major chord as a reference, then one random
# note; read guesses until the note is identified or skipped.
while True:
    try:
        os.system('clear')
        print_logo()
        print_status("Status: round=" + str(rounde) + ", hit=" + str(hit))
        print_help("Help: rEPEAT sKIP")
        playHarmonicNotes(stringToMidiNum("es g b"))
        randomNote = random.choice(generatedList)
        playNote(randomNote)
        while not done:
            guessedNote = input("Your input: ")
            if guessedNote == "r":
                # Replay the reference chord and the mystery note.
                print("Repeating...")
                playHarmonicNotes(stringToMidiNum("es g b"))
                playNote(randomNote)
            elif guessedNote == "s":
                print("Skiping...")
                done = True
            elif guessedNote not in lilypondTones:
                print("What? Syntax error!")
            else:
                # Compare pitch classes only (octave-insensitive).
                if (lilypondTones[guessedNote] == randomNote%12):
                    print("Yea!")
                    hit += 1
                    rounde += 1
                    done = True
                else:
                    # Wrong: play target then guess for comparison, reset streak.
                    print("Almost!")
                    playNote(randomNote)
                    playNote(lilypondTones[guessedNote])
                    hit = 0
        done = False
    except (KeyboardInterrupt):
        print('...Program Stopped Manually!')
        raise
| 28.16129 | 70 | 0.595647 |
4d4aa98e08158327a5b80e717c9156d507c9e0f0 | 7,613 | py | Python | tests/test_dict_config.py | tirkarthi/twiggy | 054a07e71f2ceae231270fc62cb7f0a59cf72958 | [
"BSD-3-Clause"
] | 36 | 2015-02-06T06:19:27.000Z | 2022-01-31T18:36:22.000Z | tests/test_dict_config.py | tirkarthi/twiggy | 054a07e71f2ceae231270fc62cb7f0a59cf72958 | [
"BSD-3-Clause"
] | 51 | 2015-01-13T18:15:06.000Z | 2021-05-16T06:03:07.000Z | tests/test_dict_config.py | tirkarthi/twiggy | 054a07e71f2ceae231270fc62cb7f0a59cf72958 | [
"BSD-3-Clause"
] | 6 | 2016-05-31T20:11:12.000Z | 2021-09-08T16:37:26.000Z | import copy
import pytest
from six import StringIO
import twiggy
#
# Tests: dict_config
#
# Canonical, fully-specified configuration accepted by twiggy.dict_config():
# two stream outputs and two emitters, one of them filtered.  Tests
# deepcopy this dict and mutate the copy, so the module-level original
# must never be modified in place.
VALID_CONFIG = {
    'version': '1.0',
    'incremental': False,
    'outputs': {
        'out1': {
            'output': 'twiggy.outputs.StreamOutput',
            'kwargs': {'stream': 'testing1'},
        },
        'out2': {
            'output': 'twiggy.outputs.StreamOutput',
            'kwargs': {'stream': 'testing2'},
            'format': 'twiggy.formats.shell_format'
        },
    },
    'emitters': {
        'all': {
            'level': 'DEBUG',
            'output_name': 'out1'
        },
        'some': {
            'level': 'WARNING',
            'filters': [
                {'filter': 'twiggy.filters.names',
                 'args': ['a', 'b'],
                 'kwargs': {}
                 },
                {'filter': 'twiggy.filters.names',
                 'args': ['c', 'd'],
                 'kwargs': {}
                 },
            ],
            'output_name': 'out2'
        },
    }
}
@pytest.fixture(autouse=True)
def twiggy_globals():
    """Give every test a fresh set of twiggy module-level globals."""
    twiggy._populate_globals()
    yield
    twiggy._del_globals()
@pytest.fixture
def internal_log():
    """Capture twiggy's internal log in a StringIO buffer for the test.

    Yields the buffer; the original output is restored on teardown.
    """
    output = twiggy.internal_log.output
    buf = StringIO()
    # Reuse the original formatter so captured lines look like real output.
    new_output = twiggy.outputs.StreamOutput(format=output._format, stream=buf)
    twiggy.internal_log.output = new_output
    yield buf
    twiggy.internal_log.output = output
def test_dict_config_invalid(internal_log):
    """dict_config() must reject a config dict that lacks a 'version' key."""
    with pytest.raises(ValueError) as excinfo:
        twiggy.dict_config({})
    # BUG FIX: Python 3 exceptions have no `.message` attribute, so the old
    # `excinfo.value.message == ...` raised AttributeError instead of
    # checking the error text.  Compare against str(excinfo.value) instead.
    assert str(excinfo.value) == "Config dict must contain a 'version' key"
def test_dict_config_valid(mocker):
    """A full non-incremental config clears the emitters and registers both
    the unfiltered 'all' emitter and the filtered 'some' emitter."""
    def return_how_called(*args, **kwargs):
        # Stand-in for twiggy.filters.names: records how it was invoked.
        return (args, kwargs)
    cfg = copy.deepcopy(VALID_CONFIG)
    add_emitters = mocker.patch('twiggy.add_emitters')
    mocker.patch('twiggy.emitters')
    emitters_dict_clear = mocker.patch('twiggy.emitters.clear')
    mocker.patch('twiggy.filters.names', return_how_called)
    twiggy.dict_config(cfg)
    # Non-incremental: the emitters dict must have been cleared exactly once.
    assert emitters_dict_clear.call_args_list == [mocker.call()]
    assert len(add_emitters.call_args_list) == 1
    assert len(add_emitters.call_args_list[0][0]) == 2
    # call_args_list is nested like this: [call(positional_args(first_emitter)),]
    # We expect to have called add_emitters once with two positional args which are
    # themselves tuples.  Registration order is not guaranteed, so sort out
    # which tuple is which by its name field.
    if add_emitters.call_args_list[0][0][0][0] == 'all':
        all_emitter = add_emitters.call_args_list[0][0][0]
        some_emitter = add_emitters.call_args_list[0][0][1]
    else:
        some_emitter = add_emitters.call_args_list[0][0][0]
        all_emitter = add_emitters.call_args_list[0][0][1]
    # Emitter tuples are (name, level, filters, output).
    assert all_emitter[0] == 'all'
    assert all_emitter[1] == twiggy.levels.DEBUG
    assert all_emitter[2] is None
    assert isinstance(all_emitter[3], twiggy.outputs.StreamOutput)
    assert all_emitter[3]._format == twiggy.formats.line_format
    assert all_emitter[3].stream == 'testing1'
    assert some_emitter[0] == 'some'
    assert some_emitter[1] == twiggy.levels.WARNING
    assert some_emitter[2] == [(('a', 'b'), {}), (('c', 'd'), {})]
    assert isinstance(some_emitter[3], twiggy.outputs.StreamOutput)
    assert some_emitter[3]._format == twiggy.formats.shell_format
    assert some_emitter[3].stream == 'testing2'
def test_dict_config_incremental_true(mocker):
    """With incremental=True existing emitters are kept: the dict is never
    cleared and successive dict_config() calls accumulate emitters."""
    def return_how_called(*args, **kwargs):
        # Stand-in for twiggy.filters.names: records how it was invoked.
        return (args, kwargs)
    cfg = copy.deepcopy(VALID_CONFIG)
    del cfg['emitters']['some']
    cfg['incremental'] = True
    add_emitters = mocker.patch('twiggy.add_emitters')
    mocker.patch('twiggy.emitters')
    emitters_dict_clear = mocker.patch('twiggy.emitters.clear')
    mocker.patch('twiggy.filters.names', return_how_called)
    twiggy.dict_config(cfg)
    # First incremental call: no clear, one emitter registered.
    assert emitters_dict_clear.call_args_list == []
    assert len(add_emitters.call_args_list) == 1
    assert len(add_emitters.call_args_list[0][0]) == 1
    cfg = copy.deepcopy(VALID_CONFIG)
    del cfg['emitters']['all']
    cfg['incremental'] = True
    twiggy.dict_config(cfg)
    # Second incremental call: still no clear, second emitter added.
    assert emitters_dict_clear.call_args_list == []
    assert len(add_emitters.call_args_list) == 2
    assert len(add_emitters.call_args_list[0][0]) == 1
    assert len(add_emitters.call_args_list[1][0]) == 1
    # call_args_list is nested like this: [call(positional_args(first_emitter)),]
    # We expect to have called add_emitters twice with one positional arg each time
    all_emitter = add_emitters.call_args_list[0][0][0]
    some_emitter = add_emitters.call_args_list[1][0][0]
    # Emitter tuples are (name, level, filters, output).
    assert all_emitter[0] == 'all'
    assert all_emitter[1] == twiggy.levels.DEBUG
    assert all_emitter[2] is None
    assert isinstance(all_emitter[3], twiggy.outputs.StreamOutput)
    assert all_emitter[3]._format == twiggy.formats.line_format
    assert all_emitter[3].stream == 'testing1'
    assert some_emitter[0] == 'some'
    assert some_emitter[1] == twiggy.levels.WARNING
    assert some_emitter[2] == [(('a', 'b'), {}), (('c', 'd'), {})]
    assert isinstance(some_emitter[3], twiggy.outputs.StreamOutput)
    assert some_emitter[3]._format == twiggy.formats.shell_format
    assert some_emitter[3].stream == 'testing2'
def test_dict_config_incremental_false_order(mocker):
    """
    With incremental=False the emitters dict must be cleared *before* the
    new emitter is added.  Mocks cannot observe relative ordering of the
    two operations, so this test inspects the real twiggy.emitters dict:
    after each call exactly one (the most recent) emitter may remain.
    """
    cfg = copy.deepcopy(VALID_CONFIG)
    del cfg['emitters']['some']
    twiggy.dict_config(cfg)
    assert len(twiggy.emitters) == 1
    assert 'all' in twiggy.emitters
    cfg = copy.deepcopy(VALID_CONFIG)
    del cfg['emitters']['all']
    twiggy.dict_config(cfg)
    # The earlier 'all' emitter must have been wiped by the second call.
    assert len(twiggy.emitters) == 1
    assert 'some' in twiggy.emitters
def test_dict_config_incremental_false_contents(mocker):
    """With incremental=False each call clears the emitters dict and then
    registers exactly the emitters from the supplied config."""
    def return_how_called(*args, **kwargs):
        # Stand-in for twiggy.filters.names: records how it was invoked.
        return (args, kwargs)
    cfg = copy.deepcopy(VALID_CONFIG)
    del cfg['emitters']['some']
    add_emitters = mocker.patch('twiggy.add_emitters')
    mocker.patch('twiggy.emitters')
    emitters_dict_clear = mocker.patch('twiggy.emitters.clear')
    mocker.patch('twiggy.filters.names', return_how_called)
    twiggy.dict_config(cfg)
    # First call: one clear, one emitter registered.
    assert emitters_dict_clear.call_args_list == [mocker.call()]
    assert len(add_emitters.call_args_list) == 1
    assert len(add_emitters.call_args_list[0][0]) == 1
    the_emitter = add_emitters.call_args_list[0][0][0]
    # Emitter tuples are (name, level, filters, output).
    assert the_emitter[0] == 'all'
    assert the_emitter[1] == twiggy.levels.DEBUG
    assert the_emitter[2] is None
    assert isinstance(the_emitter[3], twiggy.outputs.StreamOutput)
    assert the_emitter[3]._format == twiggy.formats.line_format
    assert the_emitter[3].stream == 'testing1'
    cfg = copy.deepcopy(VALID_CONFIG)
    del cfg['emitters']['all']
    twiggy.dict_config(cfg)
    # Note: This does not check that the clear call happens before the add_emitters call.
    # The test_dict_config_incremental_false_order() check takes care of that.
    assert emitters_dict_clear.call_args_list == [mocker.call(), mocker.call()]
    assert len(add_emitters.call_args_list) == 2
    assert len(add_emitters.call_args_list[1][0]) == 1
    the_emitter = add_emitters.call_args_list[1][0][0]
    assert the_emitter[0] == 'some'
    assert the_emitter[1] == twiggy.levels.WARNING
    assert the_emitter[2] == [(('a', 'b'), {}), (('c', 'd'), {})]
    assert isinstance(the_emitter[3], twiggy.outputs.StreamOutput)
    assert the_emitter[3]._format == twiggy.formats.shell_format
    assert the_emitter[3].stream == 'testing2'
| 31.853556 | 95 | 0.66242 |
32922bccc228897a90e6808876926e0648650612 | 747 | py | Python | test/test_location.py | hazael00/TicketMaster-API-Pro | 44053f16e69ad32bcf0fd9f94f0cd81ea7c4a3e2 | [
"MIT"
] | null | null | null | test/test_location.py | hazael00/TicketMaster-API-Pro | 44053f16e69ad32bcf0fd9f94f0cd81ea7c4a3e2 | [
"MIT"
] | null | null | null | test/test_location.py | hazael00/TicketMaster-API-Pro | 44053f16e69ad32bcf0fd9f94f0cd81ea7c4a3e2 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Ticketmaster Discovery API
Swagger spec based on Ticketmaster Discovery API
OpenAPI spec version: 1.0.0
Contact: git@edward.sh
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import picketer
from picketer.rest import ApiException
from picketer.models.location import Location
class TestLocation(unittest.TestCase):
    """Generated unit-test stub for the Location model."""

    def setUp(self):
        # No per-test fixtures needed for this stub.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testLocation(self):
        """The Location model can be instantiated with its defaults."""
        location = picketer.models.location.Location()
# Allow running the stubs directly: `python test_location.py`.
if __name__ == '__main__':
    unittest.main()
| 17.372093 | 68 | 0.677376 |
082854a2d1bf386af47bb0d6a83c248f87931985 | 9,323 | py | Python | nitro/resource/config/network/lacp.py | HanseMerkur/nitro-python | d03eb11f492a35a2a8b2a140322fbce22d25a8f7 | [
"Apache-2.0"
] | 2 | 2020-08-24T18:04:22.000Z | 2020-08-24T18:04:47.000Z | nitro/resource/config/network/lacp.py | HanseMerkur/nitro-python | d03eb11f492a35a2a8b2a140322fbce22d25a8f7 | [
"Apache-2.0"
] | null | null | null | nitro/resource/config/network/lacp.py | HanseMerkur/nitro-python | d03eb11f492a35a2a8b2a140322fbce22d25a8f7 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nitro.resource.base.base_resource import base_resource
from nitro.resource.base.base_resource import base_response
from nitro.service.options import options
from nitro.exception.nitro_exception import nitro_exception
from nitro.util.nitro_util import nitro_util
class lacp(base_resource) :
    """Configuration for Link aggregation control protocol resource.

    Generated NITRO SDK wrapper: private ``_xxx`` attributes mirror the
    API fields, exposed through properties; classmethods wrap the REST
    calls (update/get/count) via the ``base_resource`` machinery.
    """
    def __init__(self) :
        # Writable fields.
        self._syspriority = 0
        self._ownernode = 0
        # Read-only fields returned by the appliance.
        self._devicename = ""
        self._mac = ""
        self._flags = 0
        self._lacpkey = 0
        self._clustersyspriority = 0
        self._clustermac = ""
        # Populated by count queries (see count()/count_filtered()).
        self.___count = 0
    @property
    def syspriority(self) :
        """Priority number that determines which peer device of an LACP LA channel can have control over the LA channel. This parameter is globally applied to all LACP channels on the NetScaler appliance. The lower the number, the higher the priority.<br/>Default value: 32768<br/>Minimum length = 1<br/>Maximum length = 65535."""
        try :
            return self._syspriority
        except Exception as e:
            raise e
    @syspriority.setter
    def syspriority(self, syspriority) :
        """Priority number that determines which peer device of an LACP LA channel can have control over the LA channel. This parameter is globally applied to all LACP channels on the NetScaler appliance. The lower the number, the higher the priority.<br/>Default value: 32768<br/>Minimum length = 1<br/>Maximum length = 65535

        :param syspriority: new system priority value
        """
        try :
            self._syspriority = syspriority
        except Exception as e:
            raise e
    @property
    def ownernode(self) :
        """The owner node in a cluster for which we want to set the lacp priority. Owner node can vary from 0 to 31. Ownernode value of 254 is used for Cluster.<br/>Default value: 255."""
        try :
            return self._ownernode
        except Exception as e:
            raise e
    @ownernode.setter
    def ownernode(self, ownernode) :
        """The owner node in a cluster for which we want to set the lacp priority. Owner node can vary from 0 to 31. Ownernode value of 254 is used for Cluster.<br/>Default value: 255

        :param ownernode: cluster node id (also the object identifier, see _get_object_name)
        """
        try :
            self._ownernode = ownernode
        except Exception as e:
            raise e
    # The remaining properties are read-only (no setters): they reflect
    # state reported by the appliance.
    @property
    def devicename(self) :
        """Name of the channel."""
        try :
            return self._devicename
        except Exception as e:
            raise e
    @property
    def mac(self) :
        """LACP system MAC."""
        try :
            return self._mac
        except Exception as e:
            raise e
    @property
    def flags(self) :
        """Flags of this channel."""
        try :
            return self._flags
        except Exception as e:
            raise e
    @property
    def lacpkey(self) :
        """LACP key of this channel."""
        try :
            return self._lacpkey
        except Exception as e:
            raise e
    @property
    def clustersyspriority(self) :
        """LACP system (Cluster) priority."""
        try :
            return self._clustersyspriority
        except Exception as e:
            raise e
    @property
    def clustermac(self) :
        """LACP system (Cluster) mac."""
        try :
            return self._clustermac
        except Exception as e:
            raise e
    def _get_nitro_response(self, service, response) :
        """Convert a raw nitro response into resource objects.

        Returns the list of lacp resources on success; raises
        nitro_exception when the response carries an error.

        :param service: nitro service the request was issued on
        :param response: raw response payload
        """
        try :
            result = service.payload_formatter.string_to_resource(lacp_response, response, self.__class__.__name__)
            if(result.errorcode != 0) :
                # 444 means the session expired; drop it before raising.
                if (result.errorcode == 444) :
                    service.clear_session(self)
                if result.severity :
                    if (result.severity == "ERROR") :
                        raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
                else :
                    raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
            return result.lacp
        except Exception as e :
            raise e
    def _get_object_name(self) :
        """Returns the value of object identifier argument (the owner node)."""
        try :
            if self.ownernode is not None :
                return str(self.ownernode)
            return None
        except Exception as e :
            raise e
    @classmethod
    def update(cls, client, resource) :
        """Use this API to update lacp.

        Accepts a single resource or a list of resources.

        :param client: nitro service client
        :param resource: lacp resource(s) whose syspriority/ownernode to push
        """
        try :
            if type(resource) is not list :
                updateresource = lacp()
                updateresource.syspriority = resource.syspriority
                updateresource.ownernode = resource.ownernode
                return updateresource.update_resource(client)
            else :
                # NOTE(review): if `resource` is an *empty* list,
                # `updateresources` is never bound and the call below raises
                # NameError -- looks like a latent generated-code bug; confirm
                # against the upstream SDK before relying on empty-list input.
                if (resource and len(resource) > 0) :
                    updateresources = [ lacp() for _ in range(len(resource))]
                    for i in range(len(resource)) :
                        updateresources[i].syspriority = resource[i].syspriority
                        updateresources[i].ownernode = resource[i].ownernode
                result = cls.update_bulk_request(client, updateresources)
                return result
        except Exception as e :
            raise e
    @classmethod
    def get(cls, client, name="", option_="") :
        """Use this API to fetch all the lacp resources that are configured on netscaler.

        :param client: nitro service client
        :param name: owner-node id or list of ids (Default value = "")
        :param option_: extra request options (Default value = "")
        """
        try :
            if not name :
                obj = lacp()
                response = obj.get_resources(client, option_)
            else :
                # NOTE(review): when `name` is already a lacp instance
                # (type(name) == cls) no branch assigns `response`, so the
                # return below would raise NameError -- confirm intended
                # usage before passing resource objects here.
                if type(name) != cls :
                    if type(name) is not list :
                        obj = lacp()
                        obj.ownernode = name
                        response = obj.get_resource(client, option_)
                    else :
                        if name and len(name) > 0 :
                            response = [lacp() for _ in range(len(name))]
                            obj = [lacp() for _ in range(len(name))]
                            for i in range(len(name)) :
                                obj[i] = lacp()
                                obj[i].ownernode = name[i]
                                response[i] = obj[i].get_resource(client, option_)
            return response
        except Exception as e :
            raise e
    @classmethod
    def get_filtered(cls, client, filter_) :
        """Use this API to fetch filtered set of lacp resources.
        filter string should be in JSON format.eg: "port:80,servicetype:HTTP".

        :param client: nitro service client
        :param filter_: filter expression string
        """
        try :
            obj = lacp()
            option_ = options()
            option_.filter = filter_
            response = obj.getfiltered(client, option_)
            return response
        except Exception as e :
            raise e
    @classmethod
    def count(cls, client) :
        """Use this API to count the lacp resources configured on NetScaler.

        :param client: nitro service client
        """
        try :
            obj = lacp()
            option_ = options()
            option_.count = True
            response = obj.get_resources(client, option_)
            if response :
                # The count is delivered on the first resource's ___count field.
                return response[0].__dict__['___count']
            return 0
        except Exception as e :
            raise e
    @classmethod
    def count_filtered(cls, client, filter_) :
        """Use this API to count filtered the set of lacp resources.
        Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".

        :param client: nitro service client
        :param filter_: filter expression string
        """
        try :
            obj = lacp()
            option_ = options()
            option_.count = True
            option_.filter = filter_
            response = obj.getfiltered(client, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e :
            raise e
class lacp_response(base_response) :
    """Response container for lacp API calls: the returned lacp records
    plus the nitro status fields (errorcode/message/severity/sessionid)."""
    def __init__(self, length=1) :
        self.errorcode = 0
        self.message = ""
        self.severity = ""
        self.sessionid = ""
        # Pre-allocate one placeholder lacp object per expected record.
        # (Removed a redundant `self.lacp = []` that was immediately
        # overwritten by this assignment.)
        self.lacp = [lacp() for _ in range(length)]
| 32.943463 | 332 | 0.563874 |
14742f9be18a874f617d88793ec6f22500d2889b | 443 | py | Python | content/images/sync.py | mkgilson/smirnoff-host-guest-manuscript | f9884d3ea4dc3fe4a91f67d8f258d7a75f40e056 | [
"CC-BY-4.0",
"CC0-1.0"
] | null | null | null | content/images/sync.py | mkgilson/smirnoff-host-guest-manuscript | f9884d3ea4dc3fe4a91f67d8f258d7a75f40e056 | [
"CC-BY-4.0",
"CC0-1.0"
] | 1 | 2019-05-18T13:38:56.000Z | 2019-05-18T13:38:56.000Z | content/images/sync.py | mkgilson/smirnoff-host-guest-manuscript | f9884d3ea4dc3fe4a91f67d8f258d7a75f40e056 | [
"CC-BY-4.0",
"CC0-1.0"
] | null | null | null | import os
import glob
import subprocess as sp
import shutil

# Refresh every figure PDF in this directory from the simulation project's
# figures folder.  Only files that already exist here are updated; a brand
# new figure must be copied in by hand once before it is synced.
for pdf_name in glob.glob("*.pdf"):
    print(f"Updating {pdf_name}")
    source = os.path.join(
        "..",
        "..",
        "..",
        "projects",
        "smirnoff-host-guest-simulations",
        "figures",
        pdf_name,
    )
    # shutil.copy is portable and raises on failure, unlike spawning a
    # `cp` subprocess per file (which also silently ignored errors).
    shutil.copy(source, ".")
| 19.26087 | 50 | 0.349887 |
eca19b7ecc97a2f1c184b5e5bebb1974ae938940 | 2,863 | py | Python | experts/bipedal_walker_heuristic.py | cheng-xie/dpgfddagger | 5264d5b9e0ab76fc9620da63bcfd78b25dadcbec | [
"MIT"
] | null | null | null | experts/bipedal_walker_heuristic.py | cheng-xie/dpgfddagger | 5264d5b9e0ab76fc9620da63bcfd78b25dadcbec | [
"MIT"
] | null | null | null | experts/bipedal_walker_heuristic.py | cheng-xie/dpgfddagger | 5264d5b9e0ab76fc9620da63bcfd78b25dadcbec | [
"MIT"
] | null | null | null | import sys, math
import numpy as np
import gym
from gym import spaces
from gym.utils import seeding
# Phases of the gait finite-state machine used by BipedalWalkerExpert.
STAY_ON_ONE_LEG, PUT_OTHER_DOWN, PUSH_OFF = 1,2,3
SPEED = 0.29 # Will fall forward on higher speed
class BipedalWalkerExpert:
    """Hand-tuned heuristic controller for Gym's BipedalWalker.

    A three-phase finite-state machine (stay-on-one-leg, put-other-down,
    push-off) picks hip/knee joint targets; simple PD terms then convert
    targets into the 4-dim torque action.  The controller is stateful:
    each call to get_next_action() may advance the FSM and swap legs.
    """
    def __init__(self):
        self.state = STAY_ON_ONE_LEG
        self.moving_leg = 0
        self.supporting_leg = 1 - self.moving_leg
        self.SUPPORT_KNEE_ANGLE = +0.1
        self.supporting_knee_angle = self.SUPPORT_KNEE_ANGLE
    def get_next_action(self, s):
        # NOTE(review): assumes `s` is the BipedalWalker observation vector
        # (hull angle/vel at s[0..3], per-leg joint blocks of 5 starting at
        # s[4] and s[9], ground contacts at s[8]/s[13]) -- confirm env version.
        a = np.array([0.0, 0.0, 0.0, 0.0])
        contact0 = s[8]
        contact1 = s[13]
        # Each leg contributes 5 observation entries starting at index 4.
        moving_s_base = 4 + 5*self.moving_leg
        supporting_s_base = 4 + 5*self.supporting_leg
        hip_targ = [None,None] # -0.8 .. +1.1
        knee_targ = [None,None] # -0.6 .. +0.9
        hip_todo = [0.0, 0.0]
        knee_todo = [0.0, 0.0]
        # Note: plain `if` chains (not elif) -- a phase transition made in
        # one branch lets the next phase act within the same call.
        if self.state==STAY_ON_ONE_LEG:
            # Swing the moving leg forward while slowly bending the support knee.
            hip_targ[self.moving_leg] = 1.1
            knee_targ[self.moving_leg] = -0.6
            self.supporting_knee_angle += 0.03
            if s[2] > SPEED: self.supporting_knee_angle += 0.03
            self.supporting_knee_angle = min( self.supporting_knee_angle, self.SUPPORT_KNEE_ANGLE )
            knee_targ[self.supporting_leg] = self.supporting_knee_angle
            if s[supporting_s_base+0] < 0.10: # supporting leg is behind
                self.state = PUT_OTHER_DOWN
        if self.state==PUT_OTHER_DOWN:
            # Lower the moving leg until its ground-contact flag fires.
            hip_targ[self.moving_leg] = +0.1
            knee_targ[self.moving_leg] = self.SUPPORT_KNEE_ANGLE
            knee_targ[self.supporting_leg] = self.supporting_knee_angle
            if s[moving_s_base+4]:
                self.state = PUSH_OFF
                self.supporting_knee_angle = min( s[moving_s_base+2], self.SUPPORT_KNEE_ANGLE )
        if self.state==PUSH_OFF:
            # Straighten the old support leg to push; swap legs once extended
            # or once the hull is moving fast enough.
            knee_targ[self.moving_leg] = self.supporting_knee_angle
            knee_targ[self.supporting_leg] = +1.0
            if s[supporting_s_base+2] > 0.88 or s[2] > 1.2*SPEED:
                self.state = STAY_ON_ONE_LEG
                self.moving_leg = 1 - self.moving_leg
                self.supporting_leg = 1 - self.moving_leg
        # PD tracking of the chosen joint targets (only for targets that were set).
        if hip_targ[0]: hip_todo[0] = 0.9*(hip_targ[0] - s[4]) - 0.25*s[5]
        if hip_targ[1]: hip_todo[1] = 0.9*(hip_targ[1] - s[9]) - 0.25*s[10]
        if knee_targ[0]: knee_todo[0] = 4.0*(knee_targ[0] - s[6]) - 0.25*s[7]
        if knee_targ[1]: knee_todo[1] = 4.0*(knee_targ[1] - s[11]) - 0.25*s[12]
        hip_todo[0] -= 0.9*(0-s[0]) - 1.5*s[1] # PID to keep head strait
        hip_todo[1] -= 0.9*(0-s[0]) - 1.5*s[1]
        knee_todo[0] -= 15.0*s[3] # vertical speed, to damp oscillations
        knee_todo[1] -= 15.0*s[3]
        a[0] = hip_todo[0]
        a[1] = knee_todo[0]
        a[2] = hip_todo[1]
        a[3] = knee_todo[1]
        # Scale down and clamp to the env's action range.
        a = np.clip(0.5*a, -1.0, 1.0)
        return a
| 38.689189 | 99 | 0.584003 |
5f821d645f27f4b7fef7a3b3d2c26f9cf36acbf6 | 61 | py | Python | LogInDB.py | the-man-with-no-name/encrypted-password-manager | 685545a61e0204102f9cfc4734994c7d1a48cfe0 | [
"MIT"
] | 1 | 2021-03-05T10:49:59.000Z | 2021-03-05T10:49:59.000Z | LogInDB.py | the-man-with-no-name/encrypted-password-manager | 685545a61e0204102f9cfc4734994c7d1a48cfe0 | [
"MIT"
] | null | null | null | LogInDB.py | the-man-with-no-name/encrypted-password-manager | 685545a61e0204102f9cfc4734994c7d1a48cfe0 | [
"MIT"
] | null | null | null | from utils import main
if __name__ == '__main__':
main() | 15.25 | 26 | 0.672131 |
023ddffc828d6ac3758c16a24c23a9171bad21cd | 24,178 | py | Python | python3-alpha/python3-src/Lib/distutils/util.py | stormtheh4ck3r/python-for-android | b9ea9161392f60566b81482b1e25cd77004d5c45 | [
"Apache-2.0"
] | 4 | 2016-05-04T07:05:22.000Z | 2020-09-24T00:21:05.000Z | python3-alpha/python3-src/Lib/distutils/util.py | rockyzhang/zhangyanhit-python-for-android-mips | 799dd5ca16f72135f2eab71e144a68842e2aaee0 | [
"Apache-2.0"
] | null | null | null | python3-alpha/python3-src/Lib/distutils/util.py | rockyzhang/zhangyanhit-python-for-android-mips | 799dd5ca16f72135f2eab71e144a68842e2aaee0 | [
"Apache-2.0"
] | 1 | 2018-12-12T03:06:17.000Z | 2018-12-12T03:06:17.000Z | """distutils.util
Miscellaneous utility functions -- anything that doesn't fit into
one of the other *util.py modules.
"""
import sys, os, string, re
from distutils.errors import DistutilsPlatformError
from distutils.dep_util import newer
from distutils.spawn import spawn
from distutils import log
from distutils.errors import DistutilsByteCompileError
def get_platform ():
    """Return a string that identifies the current platform.  This is used
    mainly to distinguish platform-specific build directories and
    platform-specific built distributions.  Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    Windows will return one of:
       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
       win-ia64 (64bit Windows on Itanium)
       win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    """
    if os.name == 'nt':
        # sniff sys.version for architecture (the "... [MSC v.xxxx 64 bit
        # (AMD64)]" suffix of the interpreter banner).
        prefix = " bit ("
        i = sys.version.find(prefix)
        if i == -1:
            return sys.platform
        j = sys.version.find(")", i)
        look = sys.version[i+len(prefix):j].lower()
        if look == 'amd64':
            return 'win-amd64'
        if look == 'itanium':
            return 'win-ia64'
        return sys.platform
    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform
    # Try to distinguish various flavours of Unix
    (osname, host, release, version, machine) = os.uname()
    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_')
    machine = machine.replace('/', '-')
    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5": # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix": # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        # Cygwin releases look like "1.7.9(0.237/5/3)"; keep the leading
        # dotted-number part only.
        rel_re = re.compile (r'[\d.]+', re.ASCII)
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        #
        # For our purposes, we'll assume that the system version from
        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
        # to. This makes the compatibility story a bit more sane because the
        # machine is going to compile and link as if it were
        # MACOSX_DEPLOYMENT_TARGET.
        from distutils.sysconfig import get_config_vars
        cfgvars = get_config_vars()
        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
        if 1:
            # Always calculate the release of the running machine,
            # needed to determine if we can build fat binaries or not.
            macrelease = macver
            # Get the system version. Reading this plist is a documented
            # way to get the system version (see the documentation for
            # the Gestalt Manager)
            try:
                f = open('/System/Library/CoreServices/SystemVersion.plist')
            except IOError:
                # We're on a plain darwin box, fall back to the default
                # behaviour.
                pass
            else:
                try:
                    m = re.search(
                            r'<key>ProductUserVisibleVersion</key>\s*' +
                            r'<string>(.*?)</string>', f.read())
                    if m is not None:
                        macrelease = '.'.join(m.group(1).split('.')[:2])
                    # else: fall back to the default behaviour
                finally:
                    f.close()
        if not macver:
            macver = macrelease
        if macver:
            from distutils.sysconfig import get_config_vars
            release = macver
            osname = "macosx"
            if (macrelease + '.') >= '10.4.' and \
                    '-arch' in get_config_vars().get('CFLAGS', '').strip():
                # The universal build will build fat binaries, but not on
                # systems before 10.4
                #
                # Try to detect 4-way universal builds, those have machine-type
                # 'universal' instead of 'fat'.
                machine = 'fat'
                cflags = get_config_vars().get('CFLAGS')
                archs = re.findall('-arch\s+(\S+)', cflags)
                archs = tuple(sorted(set(archs)))
                if len(archs) == 1:
                    machine = archs[0]
                elif archs == ('i386', 'ppc'):
                    machine = 'fat'
                elif archs == ('i386', 'x86_64'):
                    machine = 'intel'
                elif archs == ('i386', 'ppc', 'x86_64'):
                    machine = 'fat3'
                elif archs == ('ppc64', 'x86_64'):
                    machine = 'fat64'
                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
                    machine = 'universal'
                else:
                    raise ValueError(
                       "Don't know machine value for archs=%r"%(archs,))
            elif machine == 'i386':
                # On OSX the machine type returned by uname is always the
                # 32-bit variant, even if the executable architecture is
                # the 64-bit variant
                if sys.maxsize >= 2**32:
                    machine = 'x86_64'
            elif machine in ('PowerPC', 'Power_Macintosh'):
                # Pick a sane name for the PPC architecture.
                machine = 'ppc'
                # See 'i386' case
                if sys.maxsize >= 2**32:
                    machine = 'ppc64'
    return "%s-%s-%s" % (osname, release, machine)
# get_platform ()
def convert_path (pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    Setup scripts always supply filenames in Unix style ('/'-separated);
    this splits such a path and rejoins it with the local directory
    separator so it can actually be used on the current platform.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/':
        # Already the native convention -- nothing to translate.
        return pathname
    if not pathname:
        return pathname
    if pathname[0] == '/':
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname[-1] == '/':
        raise ValueError("path '%s' cannot end with '/'" % pathname)
    # Drop every '.' component; a path made solely of them collapses to
    # the current directory.
    parts = [part for part in pathname.split('/') if part != '.']
    if not parts:
        return os.curdir
    return os.path.join(*parts)

# convert_path ()
def change_root (new_root, pathname):
    """Return 'pathname' with 'new_root' prepended.

    A relative 'pathname' is simply joined onto 'new_root'
    ("os.path.join(new_root, pathname)"); an absolute one is first made
    relative -- by stripping the leading separator or, on DOS/Windows and
    OS/2, the drive -- and then joined.
    """
    if os.name == 'posix':
        tail = pathname[1:] if os.path.isabs(pathname) else pathname
        return os.path.join(new_root, tail)
    if os.name == 'nt':
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == '\\':
            path = path[1:]
        return os.path.join(new_root, path)
    if os.name == 'os2':
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == os.sep:
            path = path[1:]
        return os.path.join(new_root, path)
    raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
# Module-level guard so check_environ() does its work only once.
_environ_checked = 0
def check_environ ():
    """Ensure that 'os.environ' has all the environment variables we
    guarantee that users can use in config files, command-line options,
    etc.  Currently this includes:
      HOME - user's home directory (Unix only)
      PLAT - description of the current platform, including hardware
             and OS (see 'get_platform()')
    """
    global _environ_checked
    if _environ_checked:
        return
    if os.name == 'posix' and 'HOME' not in os.environ:
        import pwd
        # Field 5 of the pwd entry is the home directory.
        os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
    if 'PLAT' not in os.environ:
        os.environ['PLAT'] = get_platform()
    _environ_checked = 1
def subst_vars (s, local_vars):
    """Perform shell/Perl-style variable substitution on 's'.

    Every occurrence of '$' followed by a name is considered a variable;
    each variable is replaced by its value from the 'local_vars'
    dictionary, falling back to 'os.environ' (which 'check_environ()'
    first guarantees to contain certain values).  Raise ValueError for any
    variable found in neither mapping.
    """
    check_environ()

    def _lookup(match, local_vars=local_vars):
        # Prefer the caller-supplied mapping; fall back to the environment.
        name = match.group(1)
        if name in local_vars:
            return str(local_vars[name])
        return os.environ[name]

    try:
        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _lookup, s)
    except KeyError as var:
        # 'var' is the KeyError raised by the failed environment lookup.
        raise ValueError("invalid variable '$%s'" % var)

# subst_vars ()
def grok_environment_error (exc, prefix="error: "):
    """Build a useful one-line message from an EnvironmentError.

    Handles IOError/OSError objects with or without a 'filename'
    attribute (two-file operations such as 'rename()' or 'link()' leave
    it unset), plus arbitrary exceptions via their last argument.
    Returns the message prefixed with 'prefix'.
    """
    if hasattr(exc, 'filename') and hasattr(exc, 'strerror'):
        if exc.filename:
            detail = "%s: %s" % (exc.filename, exc.strerror)
        else:
            # Two-argument posix functions don't record a filename
            # in the exception object.
            detail = "%s" % exc.strerror
    else:
        # Not an {IO,OS}Error -- fall back to the last exception argument.
        detail = str(exc.args[-1])
    return prefix + detail
# Needed by 'split_quoted()'.  Compiled lazily on first use so importing
# this module stays cheap.
_wordchars_re = _squote_re = _dquote_re = None
def _init_regex():
    """Compile the word/quote patterns used by split_quoted()."""
    global _wordchars_re, _squote_re, _dquote_re
    # A run of characters that are neither backslash, quote nor whitespace.
    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
def split_quoted (s):
    """Split a string up according to Unix shell-like rules for quotes and
    backslashes. In short: words are delimited by spaces, as long as those
    spaces are not escaped by a backslash, or inside a quoted string.
    Single and double quotes are equivalent, and the quote characters can
    be backslash-escaped. The backslash is stripped from any two-character
    escape sequence, leaving only the escaped character. The quote
    characters are stripped from any quoted string. Returns a list of
    words.
    """
    # This is a nice algorithm for splitting up a single string, since it
    # doesn't require character-by-character examination. It was a little
    # bit of a brain-bender to get it working right, though...
    # Note: 's' is rewritten in place as escapes/quotes are consumed, and
    # 'pos' tracks how far into the current word we have scanned.
    if _wordchars_re is None: _init_regex()
    s = s.strip()
    words = []
    pos = 0
    while s:
        # Consume a run of ordinary word characters starting at 'pos'.
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            # The remainder is one unbroken word -- done.
            words.append(s[:end])
            break
        if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
            words.append(s[:end]) # we definitely have a word delimiter
            s = s[end:].lstrip()
            pos = 0
        elif s[end] == '\\': # preserve whatever is being escaped;
            # will become part of the current word
            # Drop the backslash; resume scanning past the escaped char.
            s = s[:end] + s[end+1:]
            pos = end+1
        else:
            if s[end] == "'": # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"': # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError("this can't happen (bad char '%c')" % s[end])
            if m is None:
                raise ValueError("bad string (mismatched %s quotes?)" % s[end])
            # Strip the surrounding quote characters, keep the contents.
            (beg, end) = m.span()
            s = s[:beg] + s[beg+1:end-1] + s[end:]
            pos = m.end() - 2
        if pos >= len(s):
            words.append(s)
            break
    return words
# split_quoted ()
def execute(func, args, msg=None, verbose=0, dry_run=0):
    """Perform an action that affects the outside world (e.g. writing to
    the filesystem).

    Such actions are special because they are disabled by the 'dry_run'
    flag: the message is always logged, but 'func' is only invoked with
    'args' when 'dry_run' is false.  'msg' defaults to a "func(args)"
    style description.
    """
    if msg is None:
        # Default message looks like "funcname(arg1, arg2, ...)".
        msg = "%s%r" % (func.__name__, args)
        if msg.endswith(',)'):
            # Drop the trailing comma repr() adds to a 1-tuple.
            msg = msg[:-2] + ')'
    log.info(msg)
    if not dry_run:
        func(*args)
def strtobool(val):
    """Convert a string representation of truth to 1 (true) or 0 (false).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError for
    anything else.  Matching is case-insensitive.
    """
    truth_values = {
        'y': 1, 'yes': 1, 't': 1, 'true': 1, 'on': 1, '1': 1,
        'n': 0, 'no': 0, 'f': 0, 'false': 0, 'off': 0, '0': 0,
    }
    try:
        return truth_values[val.lower()]
    except KeyError:
        raise ValueError("invalid truth value %r" % (val,))
def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory. 'py_files' is a list of files
    to compile; any files that don't end in ".py" are silently skipped.
    'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.
    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'. 'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped). You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.
    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.
    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it. Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details). The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')
    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in. We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0. If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing. Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)
    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            # Prefer the secure mkstemp(); fall back to mktemp() on
            # Pythons that lack it (script_fd stays None in that case).
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")
            # The generated script re-invokes byte_compile() in *direct*
            # mode inside a subprocess launched with the right -O flag.
            script.write("""\
from distutils.util import byte_compile
files = [
""")
            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it). But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right". This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.
            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            # prefix = os.path.abspath(prefix)
            script.write(",\n".join(map(repr, py_files)) + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))
            script.close()
        # Launch the helper script under the requested optimization level,
        # then clean it up (both steps respect dry_run).
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)
    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now. Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion. Hey, it works!
    else:
        from py_compile import compile
        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue
            # Terminology from the py_compile module:
            # cfile - byte-compiled file (".pyc" in debug mode, ".pyo" otherwise)
            # dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError("invalid prefix: filename %r doesn't start with %r"
                           % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)
            cfile_base = os.path.basename(cfile)
            if direct:
                # Only recompile when forced or the source is newer than
                # the existing bytecode.
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
# byte_compile ()
def rfc822_escape(header):
    """Return 'header' escaped for inclusion in an RFC-822 header field:
    every newline is followed by eight spaces of continuation indent.
    """
    continuation = '\n' + ' ' * 8
    return continuation.join(header.split('\n'))
# 2to3 support
def run_2to3(files, fixer_names=None, options=None, explicit=None):
    """Invoke 2to3 on a list of Python files.
    The files should all come from the build area, as the
    modification is done in-place. To reduce the build time,
    only files modified since the last invocation of this
    function should be passed in the files argument."""
    # NOTE(review): 'explicit' is accepted but never used below -- confirm
    # whether it should be forwarded to the RefactoringTool.
    if not files:
        return
    # Make this class local, to delay import of 2to3
    from lib2to3.refactor import RefactoringTool, get_fixers_from_package
    class DistutilsRefactoringTool(RefactoringTool):
        # Route 2to3's own logging through the distutils 'log' module.
        def log_error(self, msg, *args, **kw):
            log.error(msg, *args)
        def log_message(self, msg, *args):
            log.info(msg, *args)
        def log_debug(self, msg, *args):
            log.debug(msg, *args)
    if fixer_names is None:
        # Default to every fixer that ships with lib2to3.
        fixer_names = get_fixers_from_package('lib2to3.fixes')
    r = DistutilsRefactoringTool(fixer_names, options=options)
    # write=True: rewrite the files in place.
    r.refactor(files, write=True)
def copydir_run_2to3(src, dest, template=None, fixer_names=None,
                     options=None, explicit=None):
    """Recursively copy a directory, only copying new and changed files,
    running run_2to3 over all newly copied Python modules afterward.
    If you give a template string, it's parsed like a MANIFEST.in.
    Returns the list of destination paths that were (re)copied.
    """
    from distutils.dir_util import mkpath
    from distutils.file_util import copy_file
    from distutils.filelist import FileList
    filelist = FileList()
    curdir = os.getcwd()
    # FileList.findall() scans relative to the cwd, so temporarily chdir
    # into 'src'; the finally clause guarantees we switch back.
    os.chdir(src)
    try:
        filelist.findall()
    finally:
        os.chdir(curdir)
    filelist.files[:] = filelist.allfiles
    if template:
        # Apply MANIFEST.in-style include/exclude rules, one per line.
        for line in template.splitlines():
            line = line.strip()
            if not line: continue
            filelist.process_template_line(line)
    copied = []
    for filename in filelist.files:
        outname = os.path.join(dest, filename)
        mkpath(os.path.dirname(outname))
        # copy_file(update=1) copies only when the source is newer;
        # res[1] is the "was copied" flag.
        res = copy_file(os.path.join(src, filename), outname, update=1)
        if res[1]: copied.append(outname)
    # Convert only the Python sources that were actually (re)copied.
    run_2to3([fn for fn in copied if fn.lower().endswith('.py')],
             fixer_names=fixer_names, options=options, explicit=explicit)
    return copied
class Mixin2to3:
    '''Mixin class for commands that run 2to3.
    To configure 2to3, setup scripts may either change
    the class variables, or inherit from individual commands
    to override how 2to3 is invoked.'''
    # provide list of fixers to run;
    # defaults to all from lib2to3.fixers
    fixer_names = None
    # options dictionary passed through to the refactoring tool
    options = None
    # list of fixers to invoke even though they are marked as explicit
    explicit = None
    def run_2to3(self, files):
        # Delegate to the module-level run_2to3() with this command's settings.
        return run_2to3(files, self.fixer_names, self.options, self.explicit)
| 37.660436 | 88 | 0.595293 |
ef19360aa9f7d720b2ea56dda738b87cd0e61433 | 1,029 | py | Python | Lib/site-packages/oauth2client/__init__.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 78 | 2017-08-19T03:46:13.000Z | 2020-02-19T04:29:45.000Z | Lib/site-packages/oauth2client/__init__.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 5 | 2017-08-21T16:33:08.000Z | 2018-06-21T18:37:18.000Z | Lib/site-packages/oauth2client/__init__.py | Kronos3/pyexec | c9e76a0302dee047ed137bc38aa669cec04c24cd | [
"bzip2-1.0.6"
] | 13 | 2017-08-19T16:46:08.000Z | 2018-11-05T23:11:34.000Z | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client library for using OAuth2, especially with Google APIs."""
# Package version string.
__version__ = '4.0.0'
# Google OAuth2 endpoint URIs (authorization, device flow, revocation,
# token exchange, and token introspection).
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code'
GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
GOOGLE_TOKEN_URI = 'https://www.googleapis.com/oauth2/v4/token'
GOOGLE_TOKEN_INFO_URI = 'https://www.googleapis.com/oauth2/v3/tokeninfo'
| 42.875 | 74 | 0.76482 |
a95014bfacaa4a0a2aa865acf6cad3667633022e | 1,911 | py | Python | setup.py | TomekTrzeciak/clize | 4ed5be0eabe67ee5e06173da406899f9aa1c176c | [
"MIT"
] | null | null | null | setup.py | TomekTrzeciak/clize | 4ed5be0eabe67ee5e06173da406899f9aa1c176c | [
"MIT"
] | null | null | null | setup.py | TomekTrzeciak/clize | 4ed5be0eabe67ee5e06173da406899f9aa1c176c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from setuptools import setup
# The PyPI long description is sourced from the README shipped with the sdist.
with open("README.rst") as fh:
    long_description = fh.read()
setup(
    name='clize',
    version='4.1.1',
    description='Turn functions into command-line interfaces',
    long_description=long_description,
    long_description_content_type='text/x-rst',
    license='MIT',
    url='https://github.com/epsy/clize',
    author='Yann Kaiser',
    author_email='kaiser.yann@gmail.com',
    # Runtime dependencies.
    install_requires=[
        'six',
        'sigtools >= 2.0',
        'attrs>=19.1.0,<20',
        'od',
        'docutils',
    ],
    # Extra packages needed only by the test suite.
    tests_require=[
        'repeated_test',
        'unittest2',
        'python-dateutil',
        'Pygments',
    ],
    # Optional feature sets, e.g. `pip install clize[datetime]`.
    extras_require={
        'datetime': ['python-dateutil'],
    },
    packages=('clize', 'clize.tests'),
    test_suite='clize.tests',
    keywords=[
        'CLI', 'options', 'arguments', 'getopts', 'getopt', 'argparse',
        'introspection', 'flags', 'decorator', 'subcommands',
    ],
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "Operating System :: OS Independent",
        "Topic :: Software Development",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Software Development :: User Interfaces",
    ],
)
| 31.327869 | 71 | 0.584511 |
bac2323b157b136a3cc98afddd0ecc8b9ab5e547 | 357 | bzl | Python | src/test/java/com/google/devtools/build/skydoc/testdata/repo_rules_test/input.bzl | zzmp/bazel | e9dfdc510c7d6f0c853e0baad2a10987f47b3f6a | [
"Apache-2.0"
] | 3 | 2019-03-18T23:49:16.000Z | 2021-05-30T09:44:18.000Z | src/test/java/com/google/devtools/build/skydoc/testdata/repo_rules_test/input.bzl | installation00/bazel | 6f38f345a1bd278a71170c5d80aba3928afdc6ec | [
"Apache-2.0"
] | null | null | null | src/test/java/com/google/devtools/build/skydoc/testdata/repo_rules_test/input.bzl | installation00/bazel | 6f38f345a1bd278a71170c5d80aba3928afdc6ec | [
"Apache-2.0"
] | 1 | 2020-11-14T00:11:50.000Z | 2020-11-14T00:11:50.000Z | def _repo_rule_impl(ctx):
ctx.file("BUILD", "")
my_repo = repository_rule(
implementation = _repo_rule_impl,
doc = "Minimal example of a repository rule.",
attrs = {
"useless" : attr.string(
doc = "This argument will be ingored. You don't have to specify it, but you may.",
default = "ignoreme",
),
},
)
| 25.5 | 91 | 0.59944 |
9f7aaf87b2f89a0181c1b74c98b80bedc94aa104 | 883 | py | Python | tfx_addons/feature_selection/feature_selection/module_file.py | pratishtha-abrol/tfx-addons | ca9936442d60104e80dd04914e2a6ee6047d0d5c | [
"Apache-2.0"
] | null | null | null | tfx_addons/feature_selection/feature_selection/module_file.py | pratishtha-abrol/tfx-addons | ca9936442d60104e80dd04914e2a6ee6047d0d5c | [
"Apache-2.0"
] | null | null | null | tfx_addons/feature_selection/feature_selection/module_file.py | pratishtha-abrol/tfx-addons | ca9936442d60104e80dd04914e2a6ee6047d0d5c | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from sklearn.feature_selection import SelectKBest as SelectorFunc
from sklearn.feature_selection import chi2
# Keyword arguments for the SelectKBest selector imported above as
# SelectorFunc: chi-squared scoring, keeping the 2 highest-scoring features.
SELECTOR_PARAMS = {
    "score_func": chi2,
    "k": 2
}
# Name of the target/label column -- presumably excluded from the selected
# features by the consuming component; confirm against the component code.
TARGET_FEATURE = 'species'
| 35.32 | 80 | 0.696489 |
9aaf3d488bcd642ef4b085ec5e4d85b4f311fcef | 5,462 | py | Python | redash/query_runner/oracle.py | cemremengu/redash | 25470b11fd7e0e7f452ef1ac52680039a9dc4f23 | [
"BSD-2-Clause"
] | null | null | null | redash/query_runner/oracle.py | cemremengu/redash | 25470b11fd7e0e7f452ef1ac52680039a9dc4f23 | [
"BSD-2-Clause"
] | null | null | null | redash/query_runner/oracle.py | cemremengu/redash | 25470b11fd7e0e7f452ef1ac52680039a9dc4f23 | [
"BSD-2-Clause"
] | null | null | null | import logging
from redash.utils import json_dumps, json_loads
from redash.query_runner import *
try:
import cx_Oracle
TYPES_MAP = {
cx_Oracle.DATETIME: TYPE_DATETIME,
cx_Oracle.CLOB: TYPE_STRING,
cx_Oracle.LOB: TYPE_STRING,
cx_Oracle.FIXED_CHAR: TYPE_STRING,
cx_Oracle.FIXED_NCHAR: TYPE_STRING,
cx_Oracle.INTERVAL: TYPE_DATETIME,
cx_Oracle.LONG_STRING: TYPE_STRING,
cx_Oracle.NATIVE_FLOAT: TYPE_FLOAT,
cx_Oracle.NCHAR: TYPE_STRING,
cx_Oracle.NUMBER: TYPE_FLOAT,
cx_Oracle.ROWID: TYPE_INTEGER,
cx_Oracle.STRING: TYPE_STRING,
cx_Oracle.TIMESTAMP: TYPE_DATETIME,
}
ENABLED = True
except ImportError:
ENABLED = False
logger = logging.getLogger(__name__)
class Oracle(BaseSQLQueryRunner):
    """Redash query runner for Oracle Database, backed by the cx_Oracle driver."""

    # Lightweight query used to test connectivity.
    noop_query = "SELECT 1 FROM dual"

    @classmethod
    def get_col_type(cls, col_type, scale):
        """Map a cx_Oracle column type (plus its numeric scale) to a Redash type.

        Returns None for driver types not present in TYPES_MAP.
        """
        if col_type == cx_Oracle.NUMBER:
            # NOTE(review): cursor.description supplies an *integer* scale, so
            # `scale is False` is never true there and NUMBER columns map to
            # TYPE_FLOAT -- confirm whether zero-scale NUMBERs should instead
            # be reported as TYPE_INTEGER.
            return TYPE_INTEGER if scale is False else TYPE_FLOAT
        else:
            return TYPES_MAP.get(col_type, None)

    @classmethod
    def enabled(cls):
        """The runner is available only when cx_Oracle imported successfully."""
        return ENABLED

    @classmethod
    def configuration_schema(cls):
        """JSON schema describing the data-source connection settings."""
        return {
            "type": "object",
            "properties": {
                "user": {"type": "string"},
                "password": {"type": "string"},
                "host": {"type": "string"},
                "port": {"type": "number"},
                "servicename": {"type": "string", "title": "DSN Service Name"},
            },
            "required": ["servicename", "user", "password", "host", "port"],
            # Fields hidden/encrypted in the UI and API responses.
            "secret": ["password"],
        }

    @classmethod
    def type(cls):
        return "oracle"

    def __init__(self, configuration):
        super(Oracle, self).__init__(configuration)

        # Build the "user/password@dsn" connection string once, up front.
        dsn = cx_Oracle.makedsn(
            self.configuration["host"],
            self.configuration["port"],
            service_name=self.configuration["servicename"],
        )

        self.connection_string = "{}/{}@{}".format(
            self.configuration["user"], self.configuration["password"], dsn
        )

    def _get_tables(self, schema):
        """Fill `schema` with owner-qualified table names and their columns.

        System/internal schemas are excluded from the listing.
        """
        query = """
        SELECT
            all_tab_cols.OWNER,
            all_tab_cols.TABLE_NAME,
            all_tab_cols.COLUMN_NAME
        FROM all_tab_cols
        WHERE all_tab_cols.OWNER NOT IN('SYS','SYSTEM','ORDSYS','CTXSYS','WMSYS','MDSYS','ORDDATA','XDB','OUTLN','DMSYS','DSSYS','EXFSYS','LBACSYS','TSMSYS')
        """

        results, error = self.run_query(query, None)

        if error is not None:
            raise Exception("Failed getting schema.")

        results = json_loads(results)

        for row in results["rows"]:
            # Qualify the table with its owner when one is reported.
            if row["OWNER"] is not None:
                table_name = "{}.{}".format(row["OWNER"], row["TABLE_NAME"])
            else:
                table_name = row["TABLE_NAME"]

            if table_name not in schema:
                schema[table_name] = {"name": table_name, "columns": []}

            schema[table_name]["columns"].append(row["COLUMN_NAME"])

        return list(schema.values())

    @classmethod
    def _convert_number(cls, value):
        """Best-effort conversion of a stringified NUMBER to int.

        Falls back to the raw value when it is not an integer literal.
        """
        try:
            return int(value)
        except (ValueError, TypeError):
            return value

    @classmethod
    def output_handler(cls, cursor, name, default_type, length, precision, scale):
        """cx_Oracle output type handler.

        Fetches LOBs as long strings, plain strings with their declared
        length, and zero-scale NUMBERs through _convert_number.  Returning
        None keeps the driver's default conversion.
        """
        if default_type in (cx_Oracle.CLOB, cx_Oracle.LOB):
            return cursor.var(cx_Oracle.LONG_STRING, 80000, cursor.arraysize)

        if default_type in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
            return cursor.var(str, length, cursor.arraysize)

        if default_type == cx_Oracle.NUMBER:
            if scale <= 0:
                return cursor.var(
                    cx_Oracle.STRING,
                    255,
                    outconverter=Oracle._convert_number,
                    arraysize=cursor.arraysize,
                )

    def run_query(self, query, user):
        """Execute `query` and return (json_data, error) per the runner contract.

        Exactly one of the two return values is None.  DML/DDL statements
        (no result set) are committed and reported as a row count.
        """
        connection = cx_Oracle.connect(self.connection_string)
        connection.outputtypehandler = Oracle.output_handler

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            rows_count = cursor.rowcount
            if cursor.description is not None:
                # SELECT-style statement: build columns/rows from the cursor.
                columns = self.fetch_columns(
                    [
                        (i[0], Oracle.get_col_type(i[1], i[5]))
                        for i in cursor.description
                    ]
                )
                rows = [
                    dict(zip((column["name"] for column in columns), row))
                    for row in cursor
                ]
                data = {"columns": columns, "rows": rows}
                error = None
                json_data = json_dumps(data)
            else:
                # DML/DDL statement: report the affected-row count.
                columns = [{"name": "Row(s) Affected", "type": "TYPE_INTEGER"}]
                rows = [{"Row(s) Affected": rows_count}]
                data = {"columns": columns, "rows": rows}
                # Bug fix: `error` was previously left unassigned on this
                # branch, raising UnboundLocalError at the return below.
                error = None
                json_data = json_dumps(data)
                connection.commit()
        except cx_Oracle.DatabaseError as err:
            error = "Query failed. {}.".format(str(err))
            json_data = None
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        finally:
            connection.close()

        return json_data, error
register(Oracle)
| 31.211429 | 157 | 0.55108 |
d2a9810700e51c2911d70c69124a06fc1f8cca3d | 1,293 | py | Python | text_collector/spiders/aceh_harianrakyat.py | gusman/web-crawler | d7a41cab2ce82d0e6b0daf5db05944381f0a4bbd | [
"MIT"
] | null | null | null | text_collector/spiders/aceh_harianrakyat.py | gusman/web-crawler | d7a41cab2ce82d0e6b0daf5db05944381f0a4bbd | [
"MIT"
] | null | null | null | text_collector/spiders/aceh_harianrakyat.py | gusman/web-crawler | d7a41cab2ce82d0e6b0daf5db05944381f0a4bbd | [
"MIT"
] | null | null | null | import scrapy
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
class ItemNews(scrapy.Item):
    """Container for one scraped news article."""
    date = scrapy.Field()     # publication date text (taken from the page's <time> element)
    title = scrapy.Field()    # headline (the entry-title <h1>)
    content = scrapy.Field()  # article body paragraphs joined into one string
class HarianRakyatAcehSpider(CrawlSpider):
    """Crawl harianrakyataceh.com, extracting articles from date-stamped URLs."""
    # NOTE(review): never read inside this class -- confirm whether some
    # extension/middleware depends on it before removing.
    stop_flag = False
    name = "aceh_harianrakyat"
    allowed_domains = [ 'harianrakyataceh.com' ]
    start_urls = [
        'https://harianrakyataceh.com/',
    ]
    rules = (
        # Article pages: URLs containing a /YYYY/MM/DD date segment.
        Rule(
            LinkExtractor(
                allow=(
                    '([A-Za-z])\w+\/[0-9]{4}\/[0-9]{2}\/[0-9]{2}',
                    '/[0-9]{4}\/[0-9]{2}\/[0-9]{2}',
                )
            ),
            callback='parse_detail'
        ),
        # Pagination links: followed for further crawling (no callback).
        Rule(
            LinkExtractor(
                # NOTE(review): ('page') is a plain string, not a 1-tuple;
                # LinkExtractor accepts both, but the intent should be confirmed.
                allow=('page'),
                deny=('epaper', 'category/photo'),
            ),
        )
    )
    def parse_detail(self, response):
        """Extract title, date and body text from a single article page."""
        #self.logger.info('\n >> PROCESSING in parse_detail %s\n', response.url)
        item = ItemNews()
        item['title'] = response.xpath('//h1[@class="entry-title"]/text()').get()
        item['date'] = response.xpath('//time/text()').get()
        # Body text: all paragraph text under the <article> element.
        content = response.xpath('//article//*/p//text()').getall()
        item['content'] = "".join(content)
        yield item
| 28.733333 | 81 | 0.512761 |
98cf885c43940cf0ee033f2a2821182c616d9faf | 321 | py | Python | mmdet/core/post_processing/__init__.py | LEOCUIZHIHAO/kpmask | 73fe907b2359b7ddc2927cd325bbbb686eb62ffd | [
"Apache-2.0"
] | 1 | 2021-03-01T08:54:48.000Z | 2021-03-01T08:54:48.000Z | mmdet/core/post_processing/__init__.py | LEOCUIZHIHAO/kpmask | 73fe907b2359b7ddc2927cd325bbbb686eb62ffd | [
"Apache-2.0"
] | null | null | null | mmdet/core/post_processing/__init__.py | LEOCUIZHIHAO/kpmask | 73fe907b2359b7ddc2927cd325bbbb686eb62ffd | [
"Apache-2.0"
] | null | null | null | from .bbox_nms import multiclass_nms, keypoint_box_nms
from .merge_augs import (merge_aug_bboxes, merge_aug_masks,
merge_aug_proposals, merge_aug_scores)
__all__ = [
'multiclass_nms', 'merge_aug_proposals', 'merge_aug_bboxes',
'merge_aug_scores', 'merge_aug_masks', 'keypoint_box_nms'
]
| 35.666667 | 64 | 0.738318 |
c5b2048916f9b81100f28ba5337b850531491955 | 3,964 | py | Python | sentence_generator.py | mafagafogigante/nlp | c37922b69cdce2f3388a90003bca8cc9da36feda | [
"BSD-2-Clause"
] | null | null | null | sentence_generator.py | mafagafogigante/nlp | c37922b69cdce2f3388a90003bca8cc9da36feda | [
"BSD-2-Clause"
] | 5 | 2016-01-17T05:03:11.000Z | 2016-01-17T13:15:02.000Z | sentence_generator.py | mafagafogigante/nlp | c37922b69cdce2f3388a90003bca8cc9da36feda | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python3
import random
import re
from collections import defaultdict
# Word lists loaded once at import time, one entry per line.
# NOTE(review): the file handles are opened without being closed; consider
# a `with` block or Path.read_text() if this module is reused long-term.
preposition_list = open('prepositions.txt').read().split('\n')
conjunction_list = open('conjunctions.txt').read().split('\n')
# Improvement: do not keep a bi-gram if the words are separated by a period.
def remove_punctuation(text):
    """Replace punctuation runs with single spaces.

    Apostrophes inside words (e.g. "don't") are preserved; quotes adjacent
    to non-word characters are stripped along with the punctuation.
    """
    punctuation_pattern = "[^\w]+'|'[^\w]+|[^\w']+"
    return re.sub(punctuation_pattern, " ", text)
def is_preposition(token):
    """
    Returns whether or not the provided token is a preposition.
    :param token: a single word
    """
    normalized = token.lower()
    return normalized in preposition_list
def is_conjunction(sentence):
    """
    Returns whether or not the provided sequence of words is a conjunction.
    :param sentence: a list of words
    """
    # Conjunctions may span several words, so compare the joined phrase.
    return ' '.join(sentence).lower() in conjunction_list
def ends_in_conjunction(sentence):
    """
    Evaluates if the specified sentence ends with a conjunction.
    :param sentence: a list of words
    """
    # Relies on the fact that the biggest conjunction has three words.
    longest_suffix = min(len(sentence), 3)
    return any(is_conjunction(sentence[-size:])
               for size in range(1, longest_suffix + 1))
def is_article(token):
    """Return True when `token` is an English article (case-insensitive)."""
    return token.lower() in {'a', 'an', 'the'}
def has_good_ending(sentence):
    """
    Evaluates whether or not the sentence has a good ending.
    :param sentence: an iterable of words
    """
    final_token = sentence[-1]
    # A sentence that trails off with an article, preposition, or
    # conjunction does not end well.
    if is_article(final_token) or is_preposition(final_token):
        return False
    return not ends_in_conjunction(sentence)
def is_number(token):
    """Return True when `token` consists entirely of decimal digit characters.

    Bug fix: the original per-character loop returned True for the empty
    string (the loop body never ran); str.isdigit() is False for "" and
    otherwise applies the same per-character test.
    """
    return token.isdigit()
def remove_numbers(words):
    """Return `words` without the tokens that are (integer) numbers."""
    # Currently just removes integers.
    return [word for word in words if not is_number(word)]
def make_probability_table(unigram_count, bigram_count):
    """Turn raw bigram counts into conditional probabilities.

    Each bigram's count is divided by the count of its first word, giving
    P(second | first).
    """
    return {bigram: count / unigram_count[bigram[0]]
            for bigram, count in bigram_count.items()}
def get_next_token(word, table):
    """
    Based on a probability table and a word, randomly picks a word that could follow the provided word.
    :param table: a probability table of bigrams
    :param word: a unigram
    """
    # Candidate successors of `word` with their transition probabilities.
    choices = {bigram[1]: probability
               for bigram, probability in table.items()
               if bigram[0] == word}
    if not choices:
        return "<IMPOSSIBLE>"
    successors = list(choices.keys())
    weights = list(choices.values())
    # Roulette-wheel selection: consume random mass until it runs out.
    remaining = random.random() * sum(weights)
    index = -1
    while remaining > 0:
        index += 1
        remaining -= weights[index]
    return successors[index]
def make_sentence(table, minimum_sentence_length=6):
    """Generate a sentence by random-walking the bigram probability table."""
    # Seed with the first word of a randomly chosen bigram.
    seed_bigram = random.choice(list(table.keys()))
    sentence = [seed_bigram[0]]
    # Keep extending until the sentence is long enough AND ends well.
    while len(sentence) < minimum_sentence_length or not has_good_ending(sentence):
        sentence.append(get_next_token(sentence[-1], table))
    return ' '.join(sentence)
def main():
    """Read corpus.txt, build a bigram language model, print ten sentences."""
    with open('corpus.txt') as corpus:
        text = corpus.read()
    # Normalize, strip punctuation/numbers, and tokenize.
    words = remove_numbers(remove_punctuation(text.lower()).split())
    unigram_table = defaultdict(int)
    bigram_table = defaultdict(int)
    # Count each adjacent pair; the unigram count covers the first element
    # of every pair (i.e. every word except the last), as the probability
    # table divides bigram counts by these.
    for current_word, next_word in zip(words, words[1:]):
        unigram_table[current_word] += 1
        bigram_table[(current_word, next_word)] += 1
    probability_table = make_probability_table(unigram_table, bigram_table)
    for _ in range(10):
        print(make_sentence(probability_table))
if __name__ == '__main__':
main()
| 28.724638 | 103 | 0.651867 |
3875cf98756e197f1fe2dec8bdae4ac783b370b6 | 1,223 | py | Python | var/spack/repos/builtin/packages/r-reshape2/package.py | varioustoxins/spack | cab0e4cb240f34891a6d753f3393e512f9a99e9a | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/r-reshape2/package.py | varioustoxins/spack | cab0e4cb240f34891a6d753f3393e512f9a99e9a | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 6 | 2022-01-08T08:41:11.000Z | 2022-03-14T19:28:07.000Z | var/spack/repos/builtin/packages/r-reshape2/package.py | foeroyingur/spack | 5300cbbb2e569190015c72d0970d25425ea38647 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RReshape2(RPackage):
    """Flexibly Reshape Data: A Reboot of the Reshape Package
    Flexibly restructure and aggregate data using just two functions: melt
    and dcast (or acast)."""
    # Upstream project page and CRAN source locations.
    homepage = "https://github.com/hadley/reshape"
    url = "https://cloud.r-project.org/src/contrib/reshape2_1.4.2.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/reshape2"
    # Known versions with their sha256 checksums.
    version('1.4.4', sha256='d88dcf9e2530fa9695fc57d0c78adfc5e361305fe8919fe09410b17da5ca12d8')
    version('1.4.3', sha256='8aff94c935e75032344b52407593392ddd4e16a88bb206984340c816d42c710e')
    version('1.4.2', sha256='6d3783610379be4c5676d9236cf66276a166b5b96c18f2759e9b219758959b6b')
    version('1.4.1', sha256='fbd49f75a5b0b7266378515af98db310cf6c772bf6e68bed01f38ee99b408042')
    # R interpreter and R-package dependencies (build- and run-time).
    depends_on('r@3.1:', when='@1.4.3:', type=('build', 'run'))
    depends_on('r-plyr@1.8.1:', type=('build', 'run'))
    depends_on('r-rcpp', type=('build', 'run'))
    depends_on('r-stringr', type=('build', 'run'))
7b78568da58e08a5eee2bb5ecbbe0cf669af5a8f | 1,636 | py | Python | agents/actor.py | jscriptcoder/SpaceX-Falcon-Landing-PG | 70b8286f9e770c1a96571c783f9e897ddcd3c06f | [
"MIT"
] | null | null | null | agents/actor.py | jscriptcoder/SpaceX-Falcon-Landing-PG | 70b8286f9e770c1a96571c783f9e897ddcd3c06f | [
"MIT"
] | null | null | null | agents/actor.py | jscriptcoder/SpaceX-Falcon-Landing-PG | 70b8286f9e770c1a96571c783f9e897ddcd3c06f | [
"MIT"
] | 1 | 2020-11-30T13:27:58.000Z | 2020-11-30T13:27:58.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.distributions import Normal
from .device import device
class Actor(nn.Module):
    """Gaussian (diagonal-covariance) policy network.

    Maps a state to the mean of a Normal distribution over actions
    (squashed into (-1, 1) by tanh) with a learnable, state-independent
    standard deviation, and reports the log-probability and entropy of
    the chosen action.
    """

    def __init__(self, state_size, action_size, hidden_size, activ, std=1.0):
        """Build the MLP.

        state_size  -- dimensionality of the observation vector
        action_size -- dimensionality of the action vector
        hidden_size -- tuple of hidden-layer widths
        activ       -- activation applied between hidden layers
        std         -- initial standard deviation of the action distribution
        """
        super().__init__()

        dims = (state_size,) + hidden_size + (action_size,)

        self.layers = nn.ModuleList([nn.Linear(dim_in, dim_out)
                                     for dim_in, dim_out
                                     in zip(dims[:-1], dims[1:])])

        # Learnable parameter. Holds the standard deviation value (sigma)
        # of the probability distribution of a continuous action space
        self.std = nn.Parameter(torch.ones(action_size) * std)

        self.activ = activ

        self.to(device)

    def forward(self, state, action=None):
        """Run the policy; sample a fresh action when `action` is None.

        Returns (mean, action, log_prob, entropy), with log_prob and
        entropy summed over action dimensions (shape [..., 1]).
        """
        # Accept raw observations (lists/ndarrays) as well as tensors.
        # Bug fix: use isinstance() so torch.Tensor subclasses are not
        # needlessly re-wrapped (the old `type(state) != torch.Tensor`
        # check re-converted them).
        if not isinstance(state, torch.Tensor):
            state = torch.FloatTensor(state).to(device)

        x = self.layers[0](state)
        # NOTE(review): the first layer's output is fed onward without the
        # activation; only the middle layers are activated -- confirm this
        # is intentional before changing it (it affects trained weights).
        for layer in self.layers[1:-1]:
            x = self.activ(layer(x))

        mean = torch.tanh(self.layers[-1](x))  # squash means into (-1, 1)

        # Softplus keeps sigma strictly positive.
        # See https://sefiks.com/2017/08/11/softplus-as-a-neural-networks-activation-function/
        std = F.softplus(self.std)
        dist = Normal(mean, std)

        if action is None:
            action = dist.sample()

        # Sum over action dims -> one scalar per batch element.
        log_prob = dist.log_prob(action).sum(-1).unsqueeze(-1)
        entropy = dist.entropy().sum(-1).unsqueeze(-1)

        return mean, action, log_prob, entropy
| 31.461538 | 94 | 0.551956 |
dd214316ee717bb78888eeaa8babce1f1f3bf85e | 1,190 | py | Python | code and data/Optimisation.py | HaloNee/Portfolio_optimization | 5cfbdea150daf0a9cb9cb9198f1267cfa2c5ffda | [
"Apache-2.0"
] | null | null | null | code and data/Optimisation.py | HaloNee/Portfolio_optimization | 5cfbdea150daf0a9cb9cb9198f1267cfa2c5ffda | [
"Apache-2.0"
] | null | null | null | code and data/Optimisation.py | HaloNee/Portfolio_optimization | 5cfbdea150daf0a9cb9cb9198f1267cfa2c5ffda | [
"Apache-2.0"
] | null | null | null | import pathlib
import numpy as np
import pandas as pd
import datetime
import matplotlib.pyplot as plt
import portfolio as pf
# read data from files:
df_data_path = pathlib.Path.cwd()/ "data" / "close_data_201801.csv"
df_data = pd.read_csv(df_data_path, index_col="Date", parse_dates=True)
# building a portfolio
mypf = pf.build_portfolio(data=df_data)
# Set risk free rate and frequency/time window of the portfolio
# (252 = conventional number of trading days per year, i.e. daily data).
mypf.risk_free_rate = 0.03
mypf.freq = 252
print("mypf.risk_free_rate = {}".format(mypf.risk_free_rate))
print("mypf.freq = {}".format(mypf.freq))
# optimisation for maximum Sharpe ratio
opt_weights = mypf.ef_maximum_sharpe_ratio(verbose=True)
# Drop assets whose optimal allocation is numerically zero before saving.
opt_weights = opt_weights[opt_weights['Allocation'] > 1e-6]
opt_weights.to_csv('data/' + 'opt_weights201801.csv', index_label='index')
fig = plt.figure(figsize=(9, 6))
ax = fig.add_subplot(1, 1, 1)
# computing and plotting efficient frontier of pf
mypf.ef_plot_efrontier()
# adding markers to optimal solutions
mypf.ef_plot_optimal_portfolios()
# and adding the individual stocks to the plot
mypf.plot_stocks()
ax.legend(loc='best')
plt.grid(linestyle='-.')
# Fixed axis limits chosen for this data set (volatility on x, return on y).
ax.set_xlim([0, 0.7])
ax.set_ylim([-0.2, 0.9])
plt.show()
| 24.791667 | 74 | 0.756303 |
35720a2924773c8654d3718301b13db4d9f665b7 | 5,941 | py | Python | wechatpy/messages.py | xxiiaaon/wechatpy | 1ba34b4eb7f544e69dbc4b263ef4ac77de166d52 | [
"MIT"
] | null | null | null | wechatpy/messages.py | xxiiaaon/wechatpy | 1ba34b4eb7f544e69dbc4b263ef4ac77de166d52 | [
"MIT"
] | null | null | null | wechatpy/messages.py | xxiiaaon/wechatpy | 1ba34b4eb7f544e69dbc4b263ef4ac77de166d52 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
wechatpy.messages
~~~~~~~~~~~~~~~~~~
This module defines all the messages you can get from WeChat server
:copyright: (c) 2014 by messense.
:license: MIT, see LICENSE for more details.
"""
import copy
from wechatpy.fields import BaseField, DateTimeField, FieldDescriptor, IntegerField, StringField
# Module-level registries mapping a WeChat message type string to its class.
MESSAGE_TYPES = {}
COMPONENT_MESSAGE_TYPES = {}


def register_message(msg_type):
    """Class decorator: register the decorated class under ``msg_type``."""
    def decorator(cls):
        # Record the class in the registry, then hand it back unmodified.
        MESSAGE_TYPES[msg_type] = cls
        return cls
    return decorator
def register_component_message(msg_type):
    """Class decorator: register a component message class under ``msg_type``."""
    def decorator(cls):
        # Same pattern as register_message, but for the component registry.
        COMPONENT_MESSAGE_TYPES[msg_type] = cls
        return cls
    return decorator
class MessageMetaClass(type):
    """Metaclass for all messages.

    Copies field descriptors inherited from base classes into the subclass
    (so each class owns independent field instances), then binds every
    BaseField attribute to the class via ``add_to_class``.
    """
    def __new__(mcs, name, bases, attrs):
        # Deep-copy fields declared on base classes unless the subclass
        # overrides them, so descriptor state is not shared between classes.
        for b in bases:
            if not hasattr(b, "_fields"):
                continue
            for k, v in b.__dict__.items():
                if k in attrs:
                    continue
                if isinstance(v, FieldDescriptor):
                    attrs[k] = copy.deepcopy(v.field)
        # NOTE: `mcs` is rebound here from the metaclass to the newly created
        # class object; the remaining statements operate on the new class.
        mcs = super().__new__(mcs, name, bases, attrs)
        mcs._fields = {}
        for name, field in mcs.__dict__.items():
            if isinstance(field, BaseField):
                field.add_to_class(mcs, name)
        return mcs
class BaseMessage(metaclass=MessageMetaClass):
    """Base class for all messages and events"""
    type = "unknown"
    # Field names map to the XML keys of the WeChat push payload.
    id = IntegerField("MsgId", 0)
    source = StringField("FromUserName")
    target = StringField("ToUserName")
    create_time = DateTimeField("CreateTime")
    time = IntegerField("CreateTime")
    def __init__(self, message):
        # `message` is the parsed payload dict the field descriptors read from.
        self._data = message
    def __repr__(self):
        return f"{self.__class__.__name__}({repr(self._data)})"
@register_message("text")
class TextMessage(BaseMessage):
    """
    Text message.
    For details see
    https://developers.weixin.qq.com/doc/offiaccount/Message_Management/Receiving_standard_messages.html
    """
    type = "text"
    content = StringField("Content")
@register_message("image")
class ImageMessage(BaseMessage):
    """
    Image message.
    For details see
    https://developers.weixin.qq.com/doc/offiaccount/Message_Management/Receiving_standard_messages.html
    """
    type = "image"
    media_id = StringField("MediaId")
    image = StringField("PicUrl")
@register_message("voice")
class VoiceMessage(BaseMessage):
    """
    Voice message.
    For details see
    https://developers.weixin.qq.com/doc/offiaccount/Message_Management/Receiving_standard_messages.html
    """
    type = "voice"
    media_id = StringField("MediaId")
    format = StringField("Format")
    recognition = StringField("Recognition")
@register_message("shortvideo")
class ShortVideoMessage(BaseMessage):
    """
    Short video message.
    For details see
    https://developers.weixin.qq.com/doc/offiaccount/Message_Management/Receiving_standard_messages.html
    """
    type = "shortvideo"
    media_id = StringField("MediaId")
    thumb_media_id = StringField("ThumbMediaId")
@register_message("video")
class VideoMessage(BaseMessage):
    """
    Video message.
    For details see
    https://developers.weixin.qq.com/doc/offiaccount/Message_Management/Receiving_standard_messages.html
    """
    type = "video"
    media_id = StringField("MediaId")
    thumb_media_id = StringField("ThumbMediaId")
@register_message("location")
class LocationMessage(BaseMessage):
    """
    Geolocation message.
    For details see
    https://developers.weixin.qq.com/doc/offiaccount/Message_Management/Receiving_standard_messages.html
    """
    type = "location"
    location_x = StringField("Location_X")
    location_y = StringField("Location_Y")
    scale = StringField("Scale")
    label = StringField("Label")
    @property
    def location(self):
        # Convenience accessor returning the (x, y) coordinate pair.
        return self.location_x, self.location_y
@register_message("link")
class LinkMessage(BaseMessage):
    """
    Link message.
    For details see
    https://developers.weixin.qq.com/doc/offiaccount/Message_Management/Receiving_standard_messages.html
    """
    type = "link"
    title = StringField("Title")
    description = StringField("Description")
    url = StringField("Url")
class UnknownMessage(BaseMessage):
    """Fallback for message types not present in MESSAGE_TYPES."""
    pass
class BaseComponentMessage(metaclass=MessageMetaClass):
    """Base class for all component messages and events"""
    type = "unknown"
    appid = StringField("AppId")
    create_time = DateTimeField("CreateTime")
    def __init__(self, message):
        # `message` is the parsed payload dict the field descriptors read from.
        self._data = message
    def __repr__(self):
        return f"{self.__class__.__name__}({repr(self._data)})"
@register_component_message("component_verify_ticket")
class ComponentVerifyTicketMessage(BaseComponentMessage):
    """
    component_verify_ticket protocol message.
    """
    type = "component_verify_ticket"
    verify_ticket = StringField("ComponentVerifyTicket")
@register_component_message("unauthorized")
class ComponentUnauthorizedMessage(BaseComponentMessage):
    """
    Authorization cancelled notification.
    """
    type = "unauthorized"
    authorizer_appid = StringField("AuthorizerAppid")
@register_component_message("authorized")
class ComponentAuthorizedMessage(BaseComponentMessage):
    """
    New authorization notification.
    """
    type = "authorized"
    authorizer_appid = StringField("AuthorizerAppid")
    authorization_code = StringField("AuthorizationCode")
    authorization_code_expired_time = StringField("AuthorizationCodeExpiredTime")
    pre_auth_code = StringField("PreAuthCode")
@register_component_message("updateauthorized")
class ComponentUpdateAuthorizedMessage(BaseComponentMessage):
    """
    Authorization updated notification.
    """
    type = "updateauthorized"
    authorizer_appid = StringField("AuthorizerAppid")
    authorization_code = StringField("AuthorizationCode")
    authorization_code_expired_time = StringField("AuthorizationCodeExpiredTime")
    pre_auth_code = StringField("PreAuthCode")
class ComponentUnknownMessage(BaseComponentMessage):
    """
    Fallback for component notification types not in COMPONENT_MESSAGE_TYPES.
    """
    type = "unknown"
| 24.24898 | 104 | 0.684733 |
692e22320e882151d47fc74ee4cd04080fc1699a | 2,491 | py | Python | day22/day22.py | alexmotoc/AoC17 | c496b1da3a5863b370e677b87b6f14c65863227c | [
"MIT"
] | null | null | null | day22/day22.py | alexmotoc/AoC17 | c496b1da3a5863b370e677b87b6f14c65863227c | [
"MIT"
] | null | null | null | day22/day22.py | alexmotoc/AoC17 | c496b1da3a5863b370e677b87b6f14c65863227c | [
"MIT"
] | null | null | null | from collections import defaultdict
def turn_left(face):
    """Return the facing direction after a 90-degree left (counter-clockwise) turn.

    Unknown values are returned unchanged, matching the original behavior.
    """
    # Counter-clockwise rotation: N -> W -> S -> E -> N
    return {'N': 'W', 'W': 'S', 'S': 'E', 'E': 'N'}.get(face, face)
def turn_right(face):
    """Return the facing direction after a 90-degree right (clockwise) turn.

    Uses a direct lookup instead of three left turns; unknown values are
    returned unchanged, matching the original behavior.
    """
    # Clockwise rotation: N -> E -> S -> W -> N
    return {'N': 'E', 'E': 'S', 'S': 'W', 'W': 'N'}.get(face, face)
def reverse(face):
    """Return the opposite facing direction (two left turns).

    Uses a direct lookup; unknown values are returned unchanged, matching
    the original behavior.
    """
    return {'N': 'S', 'S': 'N', 'E': 'W', 'W': 'E'}.get(face, face)
def move(face, row, col):
    """Return the (row, col) reached by stepping once in direction `face`.

    Row index grows southwards, column index grows eastwards.  Unknown
    directions leave the position unchanged, matching the original behavior.
    """
    dr, dc = {'N': (-1, 0), 'S': (1, 0), 'E': (0, 1), 'W': (0, -1)}.get(face, (0, 0))
    return row + dr, col + dc
# NOTE(review): `file` and `input` shadow builtins; `xrange` means this script
# targets Python 2.
with open('day22_input.txt') as file:
    input = file.read()
input = input.split('\n')
# Part 1
# Sparse infinite grid; unvisited cells default to clean ('.').
grid = defaultdict(lambda: '.')
for r in xrange(len(input)):
    for c in xrange(len(input[r])):
        grid[(r, c)] = input[r][c]
# Start in the center of the grid
row = len(input) // 2
col = len(input[0]) // 2
# Face up (NORTH)
face = 'N'
activity_bursts = 10000
infections = 0
for i in xrange(activity_bursts):
    # Turn in place
    if grid[(row, col)] == '#':
        face = turn_right(face)
        grid[(row, col)] = '.'
    else:
        face = turn_left(face)
        grid[(row, col)] = '#'
        infections += 1
    row, col = move(face, row, col)
print(infections)
# Part 2
# Fresh grid; part 2 adds Weakened ('W') and Flagged ('F') states.
grid = defaultdict(lambda: '.')
for r in xrange(len(input)):
    for c in xrange(len(input[r])):
        grid[(r, c)] = input[r][c]
# Start in the center of the grid
row = len(input) // 2
col = len(input[0]) // 2
# Face up (NORTH)
face = 'N'
activity_bursts = 10000000
infections = 0
for i in xrange(activity_bursts):
    # State cycle: clean -> weakened -> infected -> flagged -> clean.
    if grid[(row, col)] == '#':
        face = turn_right(face)
        grid[(row, col)] = 'F'
    elif grid[(row, col)] == '.':
        face = turn_left(face)
        grid[(row, col)] = 'W'
    elif grid[(row, col)] == 'F':
        face = reverse(face)
        grid[(row, col)] = '.'
    else:
        # Weakened: keep direction, become infected.
        grid[(row, col)] = '#'
        infections += 1
    row, col = move(face, row, col)
print(infections)
53f3ad20198e90239cfbdc2ba0eb13f4e69bca8b | 5,906 | py | Python | program2/kirsch_compass_gradient.py | mfsuve/ComputerVision | d5552ca90bfa949b012f4bd77ff854d2350e09e2 | [
"MIT"
] | null | null | null | program2/kirsch_compass_gradient.py | mfsuve/ComputerVision | d5552ca90bfa949b012f4bd77ff854d2350e09e2 | [
"MIT"
] | null | null | null | program2/kirsch_compass_gradient.py | mfsuve/ComputerVision | d5552ca90bfa949b012f4bd77ff854d2350e09e2 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
from matplotlib import pyplot as plt
from math import sqrt
def initFilters():
    """Return the eight 3x3 Kirsch compass kernels keyed by direction name."""
    flat_kernels = {
        'N':  [-3, -3, -3, -3, 0, -3, 5, 5, 5],
        'W':  [-3, -3, 5, -3, 0, 5, -3, -3, 5],
        'S':  [5, 5, 5, -3, 0, -3, -3, -3, -3],
        'E':  [5, -3, -3, 5, 0, -3, 5, -3, -3],
        'NW': [-3, -3, -3, -3, 0, 5, -3, 5, 5],
        'SW': [-3, 5, 5, -3, 0, 5, -3, -3, -3],
        'SE': [5, 5, -3, 5, 0, -3, -3, -3, -3],
        'NE': [-3, -3, -3, 5, 0, -3, 5, 5, -3],
    }
    # Reshape each flat 9-element kernel into its 3x3 form.
    return {name: np.array(values).reshape(3, 3) for name, values in flat_kernels.items()}
def applyFilter(img, Filter):
    """Convolve `img` with a 3x3 kernel, clamping negative responses to zero.

    The kernel is flipped (true convolution) and the image border is padded
    by mirroring the second row/column at each edge (reflect-101 style).
    Returns a float array with the same height/width as `img`.
    """
    reverseFilter = Filter[::-1, ::-1]
    h = np.size(img, axis=0)
    w = np.size(img, axis=1)
    newimage = np.zeros((h, w))
    # Extend the size for filter to fit (reflect-101 padding).
    img = np.append(img, [img[h - 2, :]], axis=0)
    img = np.append([img[1, :]], img, axis=0)
    img = np.append(img, img[:, w - 2].reshape(h + 2, 1), axis=1)
    img = np.append(img[:, 1].reshape(h + 2, 1), img, axis=1)
    # Filtering.  Fix: iterate rows over the height and columns over the
    # width — the original swapped the two ranges, which only worked for
    # square images and raised a broadcast error otherwise.
    for row in range(1, h + 1):
        for col in range(1, w + 1):
            val = np.sum(np.multiply(img[row - 1:row + 2, col - 1:col + 2], reverseFilter))
            # Clamp negative filter responses to zero.
            newimage[row - 1, col - 1] = max(0, val)
    return newimage
def maxGradient(img, filters):
    """Binary edge map: 1.0 where the strongest compass response exceeds 400.

    Applies every kernel in `filters`, takes the per-pixel maximum response,
    and thresholds it at 400 (same threshold as `direction`).
    """
    h = np.size(img, axis=0)
    w = np.size(img, axis=1)
    images = np.zeros((len(filters), h, w))
    for i, f in enumerate(filters.values()):
        images[i] = applyFilter(img, f)
    # Vectorized per-pixel maximum and threshold; replaces the original
    # O(h*w) Python loop with a single NumPy pass, same 0.0/1.0 output.
    return (images.max(axis=0) > 400).astype(float)
def direction(img, filters):
    """Quantize the dominant Kirsch response direction for every pixel.

    Returns (dir, U, V): `dir` holds codes 0..8 (0 = max response <= 400,
    i.e. no edge; 1..8 = N, E, S, W, NE, SE, NW, SW), and U/V are the x/y
    components of unit vectors for those directions, subsampled every
    `stepSize` pixels for quiver plotting.
    """
    h = np.size(img, axis=0)
    w = np.size(img, axis=1)
    global stepSize
    # Slot 0 stays all-zero so that argmax == 0 encodes "no direction".
    images = np.zeros((len(filters.values()) + 1, h, w))
    images[1] = applyFilter(img, filters.get('N'))
    images[2] = applyFilter(img, filters.get('E'))
    images[3] = applyFilter(img, filters.get('S'))
    images[4] = applyFilter(img, filters.get('W'))
    images[5] = applyFilter(img, filters.get('NE'))
    images[6] = applyFilter(img, filters.get('SE'))
    images[7] = applyFilter(img, filters.get('NW'))
    images[8] = applyFilter(img, filters.get('SW'))
    U = np.zeros((h, w))
    V = np.zeros((h, w))
    # NOTE(review): `dir` shadows the builtin of the same name.
    dir = np.zeros((h, w))
    for x in range(h):
        for y in range(w):
            values = images[:, x, y]
            val = np.max(values)
            index = np.argmax(values)
            # Same 400 threshold as maxGradient: weak responses get code 0.
            dir[x, y] = index if val > 400 else 0
            # Map the direction code to a unit vector (diagonals use sqrt(2)/2).
            if dir[x, y] == 1:
                V[x, y] = 1
            elif dir[x, y] == 2:
                U[x, y] = 1
            elif dir[x, y] == 3:
                V[x, y] = -1
            elif dir[x, y] == 4:
                U[x, y] = -1
            elif dir[x, y] == 5:
                U[x, y] = sqrt(2) / 2
                V[x, y] = sqrt(2) / 2
            elif dir[x, y] == 6:
                U[x, y] = sqrt(2) / 2
                V[x, y] = -sqrt(2) / 2
            elif dir[x, y] == 7:
                U[x, y] = -sqrt(2) / 2
                V[x, y] = sqrt(2) / 2
            elif dir[x, y] == 8:
                U[x, y] = -sqrt(2) / 2
                V[x, y] = -sqrt(2) / 2
    return dir, U[stepSize::stepSize, stepSize::stepSize], V[stepSize::stepSize, stepSize::stepSize]
def gradient(img):
    """Central-difference gradient magnitude with a fixed threshold of 15.

    Returns (gradients, U, V): magnitudes where > 15 (0 elsewhere), plus the
    raw u/v components subsampled every `stepSize` pixels for quiver plots.
    """
    h = np.size(img, axis=0)
    w = np.size(img, axis=1)
    global stepSize
    gradients = np.zeros((h, w))
    U = np.zeros((h, w))
    V = np.zeros((h, w))
    # Interior pixels only; the one-pixel border stays zero.
    for x in range(1, h-1):
        for y in range(1, w-1):
            # int() casts avoid uint8 wrap-around when subtracting pixel values.
            u = (int(img[x+1, y]) - int(img[x-1, y])) / 2
            v = (int(img[x, y+1]) - int(img[x, y-1])) / 2
            val = sqrt(u**2 + v**2)
            if val > 15:
                gradients[x, y] = val
                U[x, y] = u
                V[x, y] = v
    ### This part was used without the treshold
    ### To see the histogram of the gradient values
    # m = np.max(gradients)
    # a = np.arange(m + 1)
    # c = gradients.reshape(h*w)
    # b, x = np.histogram(c, np.size(a))
    # plt.plot(a, b)
    # plt.show()
    return gradients, U[stepSize::stepSize, stepSize::stepSize], V[stepSize::stepSize, stepSize::stepSize]
# Program
# Load the test image as grayscale (flag 0 = cv2.IMREAD_GRAYSCALE).
I = cv2.imread('images/StairsBuildingsN.png', 0)
filters = initFilters()
# Show each individual Kirsch compass response.
for f in filters.keys():
    plt.imshow(applyFilter(I, filters.get(f)), cmap='gray')
    plt.title('%s filter' % f)
    plt.show()
Jmag = maxGradient(I, filters)
plt.imshow(Jmag, cmap='gray')
plt.title('Max Gradient')
plt.show()
# NOTE(review): `global` at module scope is a no-op; stepSize is simply a
# module-level variable read by direction() and gradient().
global stepSize
stepSize = 4
Jdir, U, V = direction(I, filters)
# Hard-coded 512 assumes a 512x512 input image — TODO confirm.
X, Y = np.meshgrid(np.arange(stepSize, 512, stepSize), np.arange(stepSize, 512, stepSize))
plt.imshow(Jmag, cmap='gray')
plt.quiver(X, Y, U, V, scale=30, color='r', units='width')
plt.title('Vector Direction')
plt.show()
Imag, U, V = gradient(I)
plt.imshow(Imag, cmap='gray')
plt.quiver(X, Y, V, U, scale=1750, color='r', units='width')
plt.title('Gradient Vector Direction')
plt.show()
# Smooth, then repeat the whole pipeline on the blurred image.
# NOTE(review): ones(3,3)/9 is a box (mean) filter, not a Gaussian,
# despite the plot title below.
I = applyFilter(I, np.ones((3, 3)) / 9)
plt.imshow(I, cmap='gray')
plt.title('I with Gaussian Filter')
plt.show()
for f in filters.keys():
    plt.imshow(applyFilter(I, filters.get(f)), cmap='gray')
    plt.title('%s filter' % f)
    plt.show()
Jmag = maxGradient(I, filters)
plt.imshow(Jmag, cmap='gray')
plt.title('Max Gradient')
plt.show()
Jdir, U, V = direction(I, filters)
X, Y = np.meshgrid(np.arange(stepSize, 512, stepSize), np.arange(stepSize, 512, stepSize))
plt.imshow(Jmag, cmap='gray')
plt.quiver(X, Y, U, V, scale=30, color='r', units='width')
plt.title('Vector Direction')
plt.show()
Imag, U, V = gradient(I)
plt.imshow(Imag, cmap='gray')
plt.quiver(X, Y, V, U, scale=1750, color='r', units='width')
plt.title('Gradient Vector Direction')
plt.show()
| 29.979695 | 106 | 0.525737 |
5cc3a0e18b236c8dcf2155b00c502133d2d6c755 | 4,103 | py | Python | wpa_supplicant/examples/p2p/p2p_flush.py | byu343/sonic-wpa-supplicant | 447e9f708df996b8de88ce4fe703c0901357e12b | [
"Unlicense"
] | 1,104 | 2021-05-11T18:45:36.000Z | 2022-03-30T22:39:34.000Z | wpa_supplicant/examples/p2p/p2p_flush.py | byu343/sonic-wpa-supplicant | 447e9f708df996b8de88ce4fe703c0901357e12b | [
"Unlicense"
] | 44 | 2021-05-12T01:12:58.000Z | 2022-03-31T07:09:52.000Z | wpa_supplicant/examples/p2p/p2p_flush.py | byu343/sonic-wpa-supplicant | 447e9f708df996b8de88ce4fe703c0901357e12b | [
"Unlicense"
] | 168 | 2021-05-11T22:02:54.000Z | 2022-03-24T03:21:51.000Z | #!/usr/bin/python
# Tests P2P_Flush
# Will flush the p2p interface
# Then Program will exit
######### MAY NEED TO RUN AS SUDO #############
import dbus
import sys, os
import time
import gobject
import threading
import getopt
from dbus.mainloop.glib import DBusGMainLoop
def usage():
    """Print the command-line usage summary to stdout."""
    argv0 = sys.argv[0]
    print("Usage:")
    print(" %s -i <interface_name> \ " % argv0)
    print(" [-w <wpas_dbus_interface>]")
    print("Options:")
    print(" -i = interface name")
    print(" -w = wpas dbus interface = fi.w1.wpa_supplicant1")
    print("Example:")
    print(" %s -i wlan0" % argv0)
# Required Signals\
def deviceLost(devicepath):
    """D-Bus signal handler: log the object path of a peer that disappeared."""
    print("Device lost: %s" % devicepath)
class P2P_Flush (threading.Thread):
    """Daemon thread that flushes the wpa_supplicant P2P state over D-Bus."""
    # Needed Variables
    # NOTE(review): `global` statements in a class body are effectively no-ops
    # here; the values are actually stored as instance attributes in __init__.
    global bus
    global wpas_object
    global interface_object
    global p2p_interface
    global interface_name
    global wpas
    global wpas_dbus_interface
    global path
    global timeout
    # Dbus Paths
    global wpas_dbus_opath
    global wpas_dbus_interfaces_opath
    global wpas_dbus_interfaces_interface
    global wpas_dbus_interfaces_p2pdevice
    # Constructor
    def __init__(self,interface_name,wpas_dbus_interface,timeout):
        # Initializes variables and threads
        self.interface_name = interface_name
        self.wpas_dbus_interface = wpas_dbus_interface
        self.timeout = timeout
        # Initializes thread and daemon allows for ctrl-c kill
        threading.Thread.__init__(self)
        self.daemon = True
        # Generating interface/object paths
        # e.g. "fi.w1.wpa_supplicant1" -> "/fi/w1/wpa_supplicant1"
        self.wpas_dbus_opath = "/" + \
            self.wpas_dbus_interface.replace(".","/")
        # NOTE(review): attribute name has a doubled "wpas_" prefix (typo kept
        # from upstream); the value is never read afterwards.
        self.wpas_wpas_dbus_interfaces_opath = self.wpas_dbus_opath + \
            "/Interfaces"
        self.wpas_dbus_interfaces_interface = \
            self.wpas_dbus_interface + ".Interface"
        self.wpas_dbus_interfaces_p2pdevice = \
            self.wpas_dbus_interfaces_interface \
            + ".P2PDevice"
        # Getting interfaces and objects
        DBusGMainLoop(set_as_default=True)
        self.bus = dbus.SystemBus()
        self.wpas_object = self.bus.get_object(
            self.wpas_dbus_interface,
            self.wpas_dbus_opath)
        self.wpas = dbus.Interface(self.wpas_object,
            self.wpas_dbus_interface)
        # Try to see if supplicant knows about interface
        # If not, throw an exception
        try:
            self.path = self.wpas.GetInterface(
                self.interface_name)
        except dbus.DBusException as exc:
            error = 'Error:\n Interface ' + self.interface_name \
                + ' was not found'
            print(error)
            usage()
            # Hard exit: we are in a constructor, so no cleanup is attempted.
            os._exit(0)
        self.interface_object = self.bus.get_object(
            self.wpas_dbus_interface, self.path)
        self.p2p_interface = dbus.Interface(self.interface_object,
            self.wpas_dbus_interfaces_p2pdevice)
        # Signals
        self.bus.add_signal_receiver(deviceLost,
            dbus_interface=self.wpas_dbus_interfaces_p2pdevice,
            signal_name="DeviceLost")
    # Runs p2p_flush
    def run(self):
        # Allows other threads to keep working while MainLoop runs
        # Required for timeout implementation
        gobject.MainLoop().get_context().iteration(True)
        gobject.threads_init()
        self.p2p_interface.Flush()
        gobject.MainLoop().run()
if __name__ == "__main__":
    # Needed to show which devices were lost
    timeout = 5
    # Defaults for optional inputs
    wpas_dbus_interface = 'fi.w1.wpa_supplicant1'
    # interface_name is required
    interface_name = None
    # Using getopts to handle options
    try:
        options, args = getopt.getopt(sys.argv[1:],"hi:w:")
    except getopt.GetoptError:
        usage()
        quit()
    # If there's a switch, override default option
    for key, value in options:
        # Help
        if (key == "-h"):
            usage()
            quit()
        # Interface Name
        elif (key == "-i"):
            interface_name = value
        # Dbus interface
        elif (key == "-w"):
            wpas_dbus_interface = value
        else:
            assert False, "unhandled option"
    # Interface name is required and was not given
    if (interface_name == None):
        print("Error:\n interface_name is required")
        usage()
        quit()
    # Constructor
    # NOTE(review): the bare except hides the real failure cause; any error in
    # P2P_Flush.__init__ is reported as an invalid wpas_dbus_interface.
    try:
        p2p_flush_test = P2P_Flush(interface_name, wpas_dbus_interface,timeout)
    except:
        print("Error:\n Invalid wpas_dbus_interface")
        usage()
        quit()
    # Start P2P_Find
    p2p_flush_test.start()
    # Sleep for `timeout` seconds so the flush thread can run; Ctrl-C is
    # swallowed so the program exits cleanly.
    try:
        time.sleep(int(p2p_flush_test.timeout))
    except:
        pass
    print("p2p_flush complete")
    quit()
| 24.278107 | 73 | 0.728248 |
5b4286db8146d05bb9275efce34062e0221d6936 | 26,704 | py | Python | src/transformers/generation_logits_process.py | severinsimmler/transformers | 1a3e0c4fe6868b4eb1105dfe601a79d7e5d11a0f | [
"Apache-2.0"
] | 1 | 2021-03-31T02:23:56.000Z | 2021-03-31T02:23:56.000Z | src/transformers/generation_logits_process.py | severinsimmler/transformers | 1a3e0c4fe6868b4eb1105dfe601a79d7e5d11a0f | [
"Apache-2.0"
] | null | null | null | src/transformers/generation_logits_process.py | severinsimmler/transformers | 1a3e0c4fe6868b4eb1105dfe601a79d7e5d11a0f | [
"Apache-2.0"
] | 1 | 2020-11-02T06:37:04.000Z | 2020-11-02T06:37:04.000Z | # coding=utf-8
# Copyright 2020 The HuggingFace Inc. team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import math
from abc import ABC
from typing import Callable, Iterable, List
import numpy as np
import torch
from .file_utils import add_start_docstrings
from .utils.logging import get_logger
logger = get_logger(__name__)
LOGITS_PROCESSOR_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using :class:`~transformers.BertTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
scores (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, config.vocab_size)`):
Prediction scores of a language modeling head. These can be scores for each vocabulary token before SoftMax
or scores for each vocabulary token after SoftMax.
kwargs:
Additional logits processor specific kwargs.
Return:
:obj:`torch.FloatTensor` of shape :obj:`(batch_size, config.vocab_size)`: The processed prediction scores.
"""
class LogitsProcessor(ABC):
    """Abstract base class for all logit processors that can be applied during generation."""
    @add_start_docstrings(LOGITS_PROCESSOR_INPUTS_DOCSTRING)
    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        """Torch method for processing logits."""
        # Subclasses must override __call__; invoking the base class is an error.
        raise NotImplementedError(
            f"{self.__class__} is an abstract class. Only classes inheriting this class can be called."
        )
class LogitsWarper(ABC):
    """Abstract base class for all logit warpers that can be applied during generation with multinomial sampling."""
    @add_start_docstrings(LOGITS_PROCESSOR_INPUTS_DOCSTRING)
    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        """Torch method for warping logits."""
        # Subclasses must override __call__; invoking the base class is an error.
        raise NotImplementedError(
            f"{self.__class__} is an abstract class. Only classes inheriting this class can be called."
        )
class LogitsProcessorList(list):
    """
    This class can be used to create a list of :class:`~transformers.LogitsProcessor` or
    :class:`~transformers.LogitsWarper` to subsequently process a :obj:`scores` input tensor. This class inherits from
    list and adds a specific `__call__` method to apply each :class:`~transformers.LogitsProcessor` or
    :class:`~transformers.LogitsWarper` to the inputs.
    """
    @add_start_docstrings(LOGITS_PROCESSOR_INPUTS_DOCSTRING)
    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> torch.FloatTensor:
        # Apply each processor in order, threading the (possibly modified)
        # scores through the chain.
        for processor in self:
            # Processors whose __call__ takes more than (input_ids, scores)
            # receive the extra keyword arguments; their presence is asserted.
            function_args = inspect.signature(processor.__call__).parameters
            if len(function_args) > 2:
                assert all(
                    arg in kwargs for arg in list(function_args.keys())[2:]
                ), f"Make sure that all the required parameters: {list(function_args.keys())} for {processor.__class__} are passed to the logits processor."
                scores = processor(input_ids, scores, **kwargs)
            else:
                scores = processor(input_ids, scores)
        return scores
class MinLengthLogitsProcessor(LogitsProcessor):
    r"""
    :class:`transformers.LogitsProcessor` that forbids the `end-of-sequence` token until a minimum
    generation length is reached, by setting its score to :obj:`-float("Inf")`.

    Args:
        min_length (:obj:`int`):
            The minimum length below which the score of :obj:`eos_token_id` is set to :obj:`-float("Inf")`.
        eos_token_id (:obj:`int`):
            The id of the `end-of-sequence` token.
    """

    def __init__(self, min_length: int, eos_token_id: int):
        # Validate eagerly so misconfiguration fails at construction time.
        if not isinstance(min_length, int) or min_length < 0:
            raise ValueError(f"`min_length` has to be a positive integer, but is {min_length}")
        if not isinstance(eos_token_id, int) or eos_token_id < 0:
            raise ValueError(f"`eos_token_id` has to be a positive integer, but is {eos_token_id}")
        self.min_length = min_length
        self.eos_token_id = eos_token_id

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # While the generated sequence is still too short, make EOS unselectable.
        if input_ids.shape[-1] < self.min_length:
            scores[:, self.eos_token_id] = -float("inf")
        return scores
class TemperatureLogitsWarper(LogitsWarper):
    r"""
    :class:`transformers.LogitsWarper` that rescales the logits distribution by a temperature
    (exponential scaling of the output probability distribution).

    Args:
        temperature (:obj:`float`):
            The value used to module the logits distribution.
    """

    def __init__(self, temperature: float):
        # Reject non-float or non-positive temperatures up front.
        if not isinstance(temperature, float) or not (temperature > 0):
            raise ValueError(f"`temperature` has to be a strictly positive float, but is {temperature}")
        self.temperature = temperature

    def __call__(self, input_ids: torch.Tensor, scores: torch.Tensor) -> torch.Tensor:
        # Dividing by the temperature sharpens (<1) or flattens (>1) the distribution.
        return scores / self.temperature
class RepetitionPenaltyLogitsProcessor(LogitsProcessor):
    r"""
    :class:`transformers.LogitsProcessor` that exponentially penalizes tokens already present in
    the input sequence, discouraging repetition.

    Args:
        repetition_penalty (:obj:`float`):
            The parameter for repetition penalty. 1.0 means no penalty. See `this paper
            <https://arxiv.org/pdf/1909.05858.pdf>`__ for more details.
    """

    def __init__(self, penalty: float):
        if not isinstance(penalty, float) or not (penalty > 0):
            raise ValueError(f"`penalty` has to be a strictly positive float, but is {penalty}")
        self.penalty = penalty

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # Pull out the scores of every token that already occurs in the sequence.
        seen_scores = torch.gather(scores, 1, input_ids)
        # The penalty must always push the score towards "less likely": negative
        # scores are multiplied by it, positive scores are divided by it.
        penalized = torch.where(seen_scores < 0, seen_scores * self.penalty, seen_scores / self.penalty)
        # Write the penalized values back in place.
        scores.scatter_(1, input_ids, penalized)
        return scores
class TopPLogitsWarper(LogitsWarper):
    """
    :class:`transformers.LogitsWarper` that performs top-p (nucleus) filtering, i.e. restricting to
    the smallest set of top tokens whose probabilities sum to at least :obj:`top_p`.

    Args:
        top_p (:obj:`float`):
            If set to < 1, only the most probable tokens with probabilities that add up to :obj:`top_p` or higher are
            kept for generation.
        filter_value (:obj:`float`, `optional`, defaults to :obj:`-float("Inf")`):
            All filtered values will be set to this float value.
        min_tokens_to_keep (:obj:`int`, `optional`, defaults to 1):
            Minimum number of tokens that cannot be filtered.
    """

    def __init__(self, top_p: float, filter_value: float = -float("Inf"), min_tokens_to_keep: int = 1):
        if not isinstance(top_p, float) or (top_p < 0 or top_p > 1.0):
            raise ValueError(f"`top_p` has to be a float > 0 and < 1, but is {top_p}")

        self.top_p = top_p
        self.filter_value = filter_value
        self.min_tokens_to_keep = min_tokens_to_keep

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # Sort logits descending so the cumulative softmax identifies the nucleus.
        sorted_logits, sorted_indices = torch.sort(scores, descending=True)
        cumulative_probs = sorted_logits.softmax(dim=-1).cumsum(dim=-1)

        # Remove tokens with cumulative top_p above the threshold (token with 0 are kept)
        sorted_indices_to_remove = cumulative_probs > self.top_p
        if self.min_tokens_to_keep > 1:
            # Keep at least min_tokens_to_keep (set to min_tokens_to_keep-1 because we add the first one below)
            sorted_indices_to_remove[..., : self.min_tokens_to_keep - 1] = 0
        # Shift the indices to the right to keep also the first token above the threshold
        sorted_indices_to_remove[..., 1:] = sorted_indices_to_remove[..., :-1].clone()
        sorted_indices_to_remove[..., 0] = 0

        # scatter sorted tensors to original indexing
        indices_to_remove = sorted_indices_to_remove.scatter(1, sorted_indices, sorted_indices_to_remove)
        scores = scores.masked_fill(indices_to_remove, self.filter_value)
        return scores
class TopKLogitsWarper(LogitsWarper):
    r"""
    :class:`transformers.LogitsWarper` that keeps only the :obj:`top_k` highest-probability tokens
    and masks everything else.

    Args:
        top_k (:obj:`int`):
            The number of highest probability vocabulary tokens to keep for top-k-filtering.
        filter_value (:obj:`float`, `optional`, defaults to :obj:`-float("Inf")`):
            All filtered values will be set to this float value.
        min_tokens_to_keep (:obj:`int`, `optional`, defaults to 1):
            Minimum number of tokens that cannot be filtered.
    """

    def __init__(self, top_k: int, filter_value: float = -float("Inf"), min_tokens_to_keep: int = 1):
        if not isinstance(top_k, int) or top_k <= 0:
            raise ValueError(f"`top_k` has to be a strictly positive integer, but is {top_k}")

        self.top_k = top_k
        self.filter_value = filter_value
        self.min_tokens_to_keep = min_tokens_to_keep

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # Clamp k into [min_tokens_to_keep, vocab_size] as a safety check.
        effective_k = min(max(self.top_k, self.min_tokens_to_keep), scores.size(-1))
        # The k-th largest score per row is the cutoff; everything below it is masked.
        kth_best = torch.topk(scores, effective_k)[0][..., -1, None]
        scores = scores.masked_fill(scores < kth_best, self.filter_value)
        return scores
def _get_ngrams(ngram_size: int, prev_input_ids: torch.Tensor, num_hypos: int):
generated_ngrams = [{} for _ in range(num_hypos)]
for idx in range(num_hypos):
gen_tokens = prev_input_ids[idx].tolist()
generated_ngram = generated_ngrams[idx]
for ngram in zip(*[gen_tokens[i:] for i in range(ngram_size)]):
prev_ngram_tuple = tuple(ngram[:-1])
generated_ngram[prev_ngram_tuple] = generated_ngram.get(prev_ngram_tuple, []) + [ngram[-1]]
return generated_ngrams
def _get_generated_ngrams(banned_ngrams, prev_input_ids, ngram_size, cur_len):
# Before decoding the next token, prevent decoding of ngrams that have already appeared
start_idx = cur_len + 1 - ngram_size
ngram_idx = tuple(prev_input_ids[start_idx:cur_len].tolist())
return banned_ngrams.get(ngram_idx, [])
def _calc_banned_ngram_tokens(
    ngram_size: int, prev_input_ids: torch.Tensor, num_hypos: int, cur_len: int
) -> List[Iterable[int]]:
    """Ported from fairseq: per-hypothesis banned next tokens for no-repeat-ngram beam search."""
    if cur_len + 1 < ngram_size:
        # Not enough generated tokens yet to complete an ngram, so nothing is banned.
        return [[] for _ in range(num_hypos)]

    generated_ngrams = _get_ngrams(ngram_size, prev_input_ids, num_hypos)

    banned = []
    for hypo_idx in range(num_hypos):
        banned.append(
            _get_generated_ngrams(generated_ngrams[hypo_idx], prev_input_ids[hypo_idx], ngram_size, cur_len)
        )
    return banned
class NoRepeatNGramLogitsProcessor(LogitsProcessor):
    r"""
    :class:`transformers.LogitsProcessor` that enforces no repetition of n-grams. See `Fairseq
    <https://github.com/pytorch/fairseq/blob/a07cb6f40480928c9e0548b737aadd36ee66ac76/fairseq/sequence_generator.py#L345>`__.

    Args:
        ngram_size (:obj:`int`):
            All ngrams of size :obj:`ngram_size` can only occur once.
    """

    def __init__(self, ngram_size: int):
        if not isinstance(ngram_size, int) or ngram_size <= 0:
            raise ValueError(f"`ngram_size` has to be a strictly positive integer, but is {ngram_size}")
        self.ngram_size = ngram_size

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        num_batch_hypotheses = scores.shape[0]
        cur_len = input_ids.shape[-1]
        # Tokens that would complete an ngram already present in each hypothesis.
        banned_batch_tokens = _calc_banned_ngram_tokens(self.ngram_size, input_ids, num_batch_hypotheses, cur_len)

        # Make every banned continuation unselectable.
        for i, banned_tokens in enumerate(banned_batch_tokens):
            scores[i, banned_tokens] = -float("inf")

        return scores
class EncoderNoRepeatNGramLogitsProcessor(LogitsProcessor):
    r"""
    :class:`transformers.LogitsProcessor` that enforces no repetition of encoder input ids n-grams for the decoder ids.
    See `ParlAI <https://github.com/facebookresearch/ParlAI/blob/master/parlai/core/torch_generator_agent.py#L1350>`__.

    Args:
        encoder_ngram_size (:obj:`int`):
            All ngrams of size :obj:`ngram_size` can only occur within the encoder input ids.
        encoder_input_ids (:obj:`int`):
            The encoder_input_ids that should not be repeated within the decoder ids.
    """

    def __init__(self, encoder_ngram_size: int, encoder_input_ids: torch.LongTensor):
        if not isinstance(encoder_ngram_size, int) or encoder_ngram_size <= 0:
            raise ValueError(
                f"`encoder_ngram_size` has to be a strictly positive integer, but is {encoder_ngram_size}"
            )
        self.ngram_size = encoder_ngram_size
        # Accept a single unbatched sequence by promoting it to batch size 1.
        if len(encoder_input_ids.shape) == 1:
            encoder_input_ids = encoder_input_ids.unsqueeze(0)
        self.batch_size = encoder_input_ids.shape[0]
        # Precompute the encoder's ngram table once at construction time.
        self.generated_ngrams = _get_ngrams(encoder_ngram_size, encoder_input_ids, self.batch_size)

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # B x num_beams
        num_hypos = scores.shape[0]
        num_beams = num_hypos // self.batch_size
        cur_len = input_ids.shape[-1]
        # All beams of a batch element share that element's encoder ngrams
        # (hence the hypo_idx // num_beams indexing).
        banned_batch_tokens = [
            _get_generated_ngrams(
                self.generated_ngrams[hypo_idx // num_beams], input_ids[hypo_idx], self.ngram_size, cur_len
            )
            for hypo_idx in range(num_hypos)
        ]

        for i, banned_tokens in enumerate(banned_batch_tokens):
            scores[i, banned_tokens] = -float("inf")

        return scores
class NoBadWordsLogitsProcessor(LogitsProcessor):
    """
    :class:`transformers.LogitsProcessor` that enforces that specified sequences will never be sampled.

    Args:
        bad_words_ids (:obj:`List[List[int]]`):
            List of list of token ids that are not allowed to be generated. In order to get the tokens of the words
            that should not appear in the generated text, use :obj:`tokenizer(bad_word,
            add_prefix_space=True).input_ids`.
        eos_token_id (:obj:`int`):
            The id of the `end-of-sequence` token.
    """

    def __init__(self, bad_words_ids: Iterable[Iterable[int]], eos_token_id: int):
        if not isinstance(bad_words_ids, list) or len(bad_words_ids) == 0:
            # Fixed typo: "non-emtpy" -> "non-empty".
            raise ValueError(f"`bad_words_ids` has to be a non-empty list, but is {bad_words_ids}.")
        if any(not isinstance(bad_word_ids, list) for bad_word_ids in bad_words_ids):
            raise ValueError(f"`bad_words_ids` has to be a list of lists, but is {bad_words_ids}.")
        if any(
            any((not isinstance(token_id, (int, np.integer)) or token_id < 0) for token_id in bad_word_ids)
            for bad_word_ids in bad_words_ids
        ):
            raise ValueError(
                f"Each list in `bad_words_ids` has to be a list of positive integers, but is {bad_words_ids}."
            )
        # A bare [eos_token_id] sequence is dropped: suppressing EOS is the job of
        # min-length handling, not of this processor.
        self.bad_words_ids = list(filter(lambda bad_token_seq: bad_token_seq != [eos_token_id], bad_words_ids))
        for banned_token_seq in self.bad_words_ids:
            # Explicit exception instead of `assert` so the check survives `python -O`.
            if len(banned_token_seq) == 0:
                raise ValueError(
                    "Banned words token sequences {} cannot have an empty list".format(bad_words_ids)
                )

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        """Ban, per batch row, the last token of every bad-word sequence whose prefix matches the generated ids."""
        banned_tokens = self._calc_banned_bad_words_ids(input_ids)
        scores = self._set_scores_to_inf_for_banned_tokens(scores, banned_tokens)
        return scores

    def _tokens_match(self, prev_tokens: torch.LongTensor, tokens: List[int]) -> bool:
        """Return True when `tokens` equals the tail of `prev_tokens` (an empty `tokens` always matches)."""
        if len(tokens) == 0:
            # if bad word tokens is just one token always ban it
            return True
        if len(tokens) > len(prev_tokens):
            # if bad word tokens are longer than prev input_ids they can't be equal
            return False
        return prev_tokens[-len(tokens) :].tolist() == tokens

    def _calc_banned_bad_words_ids(self, prev_input_ids: Iterable[int]) -> List[List[int]]:
        """For each batch row, collect the final token of every bad-word sequence whose prefix matched."""
        banned_tokens = []
        for prev_input_ids_slice in prev_input_ids:
            banned_tokens_slice = []
            for banned_token_seq in self.bad_words_ids:
                if self._tokens_match(prev_input_ids_slice, banned_token_seq[:-1]):
                    banned_tokens_slice.append(banned_token_seq[-1])
            banned_tokens.append(banned_tokens_slice)
        return banned_tokens

    def _set_scores_to_inf_for_banned_tokens(
        self, scores: torch.Tensor, banned_tokens: List[List[int]]
    ) -> torch.Tensor:
        """
        Modifies the scores in place by setting the banned token positions to `-inf`. Banned token is expected to be a
        list of list of banned tokens to ban in the format [[batch index, vocabulary position],...

        Args:
            scores: logits distribution of shape (batch size, vocabulary size)
            banned_tokens: list of list of tokens to ban of length (batch_size)

        Returns:
            The (masked) `scores` tensor.
        """
        banned_mask_list = []
        for idx, batch_banned_tokens in enumerate(banned_tokens):
            for token in batch_banned_tokens:
                # Eliminates invalid bad word IDs that are over the vocabulary size.
                # Valid column indices are 0 .. vocab_size - 1, so strict `<` is required
                # (the previous `<=` let an id equal to the vocab size through).
                if token < scores.shape[1]:
                    banned_mask_list.append([idx, token])
                else:
                    # Note the trailing space before the implicit concatenation: the
                    # previous message rendered "...in thevocabulary...".
                    logger.error(
                        f"An invalid bad word ID is defined: {token}. This ID is not contained in the "
                        f"vocabulary, and is therefore ignored."
                    )
        if not banned_mask_list:
            return scores
        banned_mask = torch.LongTensor(banned_mask_list)
        indices = torch.ones(len(banned_mask))
        # A sparse tensor is generated from a list of coordinates: [[0, 1], [0, 2], [2, 0]]. A conversion to dense tensor generates:
        # [ 0  1  1 ]
        # [ 0  0  0 ]
        # [ 1  0  0 ]
        banned_mask = (
            torch.sparse.LongTensor(banned_mask.t(), indices, scores.size()).to(scores.device).to_dense().bool()
        )
        scores = scores.masked_fill(banned_mask, -float("inf"))
        return scores
class PrefixConstrainedLogitsProcessor(LogitsProcessor):
    r"""
    :class:`transformers.LogitsProcessor` that enforces constrained generation and is useful for prefix-conditioned
    constrained generation. See `Autoregressive Entity Retrieval <https://arxiv.org/abs/2010.00904>`__ for more
    information.

    Args:
        prefix_allowed_tokens_fn: (:obj:`Callable[[int, torch.Tensor], List[int]]`):
            This function constraints the beam search to allowed tokens only at each step. This function takes 2
            arguments :obj:`inputs_ids` and the batch ID :obj:`batch_id`. It has to return a list with the allowed
            tokens for the next generation step conditioned on the previously generated tokens :obj:`inputs_ids` and
            the batch ID :obj:`batch_id`.
    """

    def __init__(self, prefix_allowed_tokens_fn: Callable[[int, torch.Tensor], List[int]], num_beams: int):
        self._prefix_allowed_tokens_fn = prefix_allowed_tokens_fn
        self._num_beams = num_beams

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # Start from "everything forbidden" and open up only the allowed columns.
        mask = torch.full_like(scores, -math.inf)
        seq_len = input_ids.shape[-1]
        per_item_beams = input_ids.view(-1, self._num_beams, seq_len)
        for batch_id, beams in enumerate(per_item_beams):
            for beam_id, sent in enumerate(beams):
                row = batch_id * self._num_beams + beam_id
                allowed = self._prefix_allowed_tokens_fn(batch_id, sent)
                mask[row, allowed] = 0
        return scores + mask
class HammingDiversityLogitsProcessor(LogitsProcessor):
    r"""
    :class:`transformers.LogitsProcessor` that enforces diverse beam search. Note that this logits processor is only
    effective for :meth:`transformers.PretrainedModel.group_beam_search`. See `Diverse Beam Search: Decoding Diverse
    Solutions from Neural Sequence Models <https://arxiv.org/pdf/1610.02424.pdf>`__ for more details.

    Args:
        diversity_penalty (:obj:`float`):
            This value is subtracted from a beam's score if it generates a token same as any beam from other group at a
            particular time. Note that :obj:`diversity_penalty` is only effective if ``group beam search`` is enabled.
        num_beams (:obj:`int`):
            Number of beams used for group beam search. See `this paper <https://arxiv.org/pdf/1610.02424.pdf>`__ for
            more details.
        num_beam_groups (:obj:`int`):
            Number of groups to divide :obj:`num_beams` into in order to ensure diversity among different groups of
            beams. See `this paper <https://arxiv.org/pdf/1610.02424.pdf>`__ for more details.
    """

    def __init__(self, diversity_penalty: float, num_beams: int, num_beam_groups: int):
        # Validate everything first, assign afterwards. The `not x > 0.0` form is kept
        # on purpose (it also rejects NaN, unlike `x <= 0.0`).
        if not isinstance(diversity_penalty, float) or (not diversity_penalty > 0.0):
            raise ValueError("`diversity_penalty` should be a float strictly larger than 0.")
        if not isinstance(num_beams, int) or num_beams < 2:
            raise ValueError("`num_beams` should be an integer strictly larger than 1.")
        if not isinstance(num_beam_groups, int) or num_beam_groups < 2:
            raise ValueError("`num_beam_groups` should be an integer strictly larger than 1.")
        if num_beam_groups > num_beams:
            raise ValueError("`beam_groups` has to be smaller or equal to `num_beams`.")
        self._diversity_penalty = diversity_penalty
        self._num_beams = num_beams
        self._num_sub_beams = num_beams // num_beam_groups

    def __call__(
        self,
        input_ids: torch.LongTensor,
        scores: torch.FloatTensor,
        current_tokens: torch.LongTensor,
        beam_group_idx: int,
    ) -> torch.FloatTensor:
        # Hamming diversity: penalise reusing, in the current group, a token that any
        # beam of an earlier group already chose at this time step.
        batch_size = current_tokens.shape[0] // self._num_beams
        group_start = beam_group_idx * self._num_sub_beams
        group_end = min(group_start + self._num_sub_beams, self._num_beams)
        group_size = group_end - group_start
        vocab_size = scores.shape[-1]
        if group_start == 0:
            # The first group has no earlier groups to diversify against.
            return scores
        for batch_idx in range(batch_size):
            base = batch_idx * self._num_beams
            # Tokens emitted at this step by all beams of the previous groups.
            previous_group_tokens = current_tokens[base : base + group_start]
            token_frequency = torch.bincount(previous_group_tokens, minlength=vocab_size).to(scores.device)
            scores[batch_idx * group_size : (batch_idx + 1) * group_size] -= self._diversity_penalty * token_frequency
        return scores
class ForcedBOSTokenLogitsProcessor(LogitsProcessor):
    r"""
    :class:`~transformers.LogitsProcessor` that enforces the specified token as the first generated token.

    Args:
        bos_token_id (:obj:`int`):
            The id of the token to force as the first generated token.
    """

    def __init__(self, bos_token_id: int):
        self.bos_token_id = bos_token_id

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # Only act on the very first generation step (one token in the sequence so far).
        if input_ids.shape[-1] == 1:
            vocab = torch.arange(scores.shape[1], device=scores.device)
            # Forbid every column except the forced BOS token, then make BOS certain.
            scores[:, vocab != self.bos_token_id] = -float("inf")
            scores[:, self.bos_token_id] = 0
        return scores
class ForcedEOSTokenLogitsProcessor(LogitsProcessor):
    r"""
    :class:`~transformers.LogitsProcessor` that enforces the specified token as the last generated token when
    :obj:`max_length` is reached.

    Args:
        max_length (:obj:`int`):
            The maximum length of the sequence to be generated.
        eos_token_id (:obj:`int`):
            The id of the token to force as the last generated token when :obj:`max_length` is reached.
    """

    def __init__(self, max_length: int, eos_token_id: int):
        self.max_length = max_length
        self.eos_token_id = eos_token_id

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # Trigger on the step that produces the final token of the sequence.
        if input_ids.shape[-1] == self.max_length - 1:
            vocab = torch.arange(scores.shape[1], device=scores.device)
            # Forbid every column except the forced EOS token, then make EOS certain.
            scores[:, vocab != self.eos_token_id] = -float("inf")
            scores[:, self.eos_token_id] = 0
        return scores
class InfNanRemoveLogitsProcessor(LogitsProcessor):
    r"""
    :class:`~transformers.LogitsProcessor` that removes all :obj:`nan` and :obj:`inf` values to avoid the generation
    method to fail. Note that using the logits processor should only be used if necessary since it can slow down the
    generation method.
    """

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # NaN logits carry no ranking information; neutralize them in place.
        scores[torch.isnan(scores)] = 0.0
        # Clamp +inf to the largest finite value of the dtype. -inf is deliberately
        # left alone: other processors use it to ban tokens.
        scores[scores == float("inf")] = torch.finfo(scores.dtype).max
        return scores
| 44.730318 | 156 | 0.673907 |
f93379a5c6fcf46c34f07c556e3c89e502141bd4 | 308 | py | Python | treebankanalytics/formatters/csvformatter.py | Cocophotos/TreebankAnalytics | cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01 | [
"MIT"
] | 2 | 2015-10-28T21:12:36.000Z | 2016-09-08T14:00:41.000Z | treebankanalytics/formatters/csvformatter.py | Cocophotos/TreebankAnalytics | cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01 | [
"MIT"
] | null | null | null | treebankanalytics/formatters/csvformatter.py | Cocophotos/TreebankAnalytics | cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01 | [
"MIT"
] | null | null | null | import io
__all__ = ['CSVFormatter']
class CSVFormatter(object):
def __init__(self):
pass
def format(self, table):
output = io.StringIO()
for row in table:
print('\t'.join(row), file=output)
c = output.getvalue()
output.close()
return c
| 19.25 | 46 | 0.558442 |
fdc5769788f9cf5d4563284bc87a4b39ba1bae64 | 2,518 | py | Python | test/check-exercises.py | kishankj/python | 82042de746128127502e109111e6c4e8ab002af6 | [
"MIT"
] | 1 | 2020-11-02T10:40:42.000Z | 2020-11-02T10:40:42.000Z | test/check-exercises.py | kishankj/python | 82042de746128127502e109111e6c4e8ab002af6 | [
"MIT"
] | 1 | 2019-07-23T14:55:16.000Z | 2019-07-23T16:58:25.000Z | test/check-exercises.py | kishankj/python | 82042de746128127502e109111e6c4e8ab002af6 | [
"MIT"
] | 1 | 2020-11-02T10:40:06.000Z | 2020-11-02T10:40:06.000Z | #!/usr/bin/env python3
import os
import glob
import shutil
import subprocess
import sys
import tempfile
import json
# Allow high-performance tests to be skipped
ALLOW_SKIP = ['alphametics', 'largest-series-product']
def check_assignment(name, test_file):
    """Run one exercise's tests against its example solution; return the test exit code."""
    sandbox = tempfile.mkdtemp(name)
    module_name = name.replace("-", "_")
    try:
        dest_test = os.path.join(sandbox, os.path.basename(test_file))
        if name in ALLOW_SKIP:
            # High-cost exercises keep their @unittest.skip decorators as-is.
            shutil.copyfile(test_file, dest_test)
        else:
            # Strip skip decorators so every test actually runs.
            with open(test_file, 'r') as src:
                kept = [ln for ln in src if not ln.strip().startswith('@unittest.skip')]
            with open(dest_test, 'w') as dst:
                dst.writelines(kept)
        # The example solution is imported by the tests under the exercise's module name.
        example_src = os.path.join(os.path.dirname(test_file), 'example.py')
        shutil.copyfile(example_src, os.path.join(sandbox, '{}.py'.format(module_name)))
        return subprocess.call([sys.executable, dest_test])
    finally:
        shutil.rmtree(sandbox)
def load_config():
    """Return the list of non-deprecated exercise slugs from ./config.json, or exit(1) on error."""
    try:
        with open('./config.json') as json_file:
            data = json.load(json_file)
    except IOError:
        print('FAIL: config.json file not found')
        raise SystemExit(1)
    try:
        problems = []
        for entry in data['exercises']:
            if "deprecated" not in entry:
                problems.append(entry['slug'])
    except KeyError:
        # Missing 'exercises' key or an entry without 'slug'.
        print('FAIL: config.json has an incorrect format')
        raise SystemExit(1)
    return problems
def main():
    """Check the exercises named on the command line, or every exercise in config.json."""
    if len(sys.argv) >= 2:
        # Explicit exercise names; trailing slashes from shell completion are tolerated.
        targets = [arg.strip('/') for arg in sys.argv[1:]]
    else:
        targets = load_config()
    failures = []
    for slug in targets:
        matches = glob.glob('./exercises/{}/*_test.py'.format(slug))
        print('# ', slug)
        if not matches:
            print('FAIL: File with test cases not found')
            failures.append('{} (FileNotFound)'.format(slug))
        elif check_assignment(slug, matches[0]):
            # Non-zero exit code from the test run means the exercise failed.
            failures.append('{} (TestFailed)'.format(slug))
        print('')
    print('TestEnvironment:', sys.executable.capitalize(), '\n\n')
    if failures:
        print('FAILURES: ', ', '.join(failures))
        raise SystemExit(1)
    print('SUCCESS!')
if __name__ == '__main__':
main()
| 29.623529 | 79 | 0.603257 |
6dfe1f353923cc15e0b3df4a53aa5c877aece6b2 | 284 | py | Python | tweaktime/conftest.py | black-redoc/tweaktime | a185990760a007a72f73eea6743b8ac29c889a96 | [
"MIT"
] | null | null | null | tweaktime/conftest.py | black-redoc/tweaktime | a185990760a007a72f73eea6743b8ac29c889a96 | [
"MIT"
] | null | null | null | tweaktime/conftest.py | black-redoc/tweaktime | a185990760a007a72f73eea6743b8ac29c889a96 | [
"MIT"
] | null | null | null | import pytest
from tweaktime.users.models import User
from tweaktime.users.tests.factories import UserFactory
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir):
    """Redirect Django's MEDIA_ROOT to a per-test temp dir (autouse: applies to every test)."""
    settings.MEDIA_ROOT = tmpdir.strpath
@pytest.fixture
def user() -> User:
    """Provide a freshly created User built by factory-boy's UserFactory."""
    return UserFactory()
| 18.933333 | 55 | 0.778169 |
b93e38eaf2dc3882a7395a3a49594c71359d35f7 | 2,174 | py | Python | google/ads/google_ads/v4/proto/services/currency_constant_service_pb2_grpc.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | 1 | 2021-04-09T04:28:47.000Z | 2021-04-09T04:28:47.000Z | google/ads/google_ads/v4/proto/services/currency_constant_service_pb2_grpc.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v4/proto/services/currency_constant_service_pb2_grpc.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.ads.google_ads.v4.proto.resources import currency_constant_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_resources_dot_currency__constant__pb2
from google.ads.google_ads.v4.proto.services import currency_constant_service_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_services_dot_currency__constant__service__pb2
# NOTE(review): this module is emitted by the gRPC protoc plugin ("DO NOT EDIT");
# comments added here will be lost on regeneration.
class CurrencyConstantServiceStub(object):
  """Service to fetch currency constants.
  """
  def __init__(self, channel):
    """Constructor.
    Args:
      channel: A grpc.Channel.
    """
    # Unary-unary RPC callable: serializes GetCurrencyConstantRequest on the way
    # out and deserializes the CurrencyConstant resource from the response.
    self.GetCurrencyConstant = channel.unary_unary(
        '/google.ads.googleads.v4.services.CurrencyConstantService/GetCurrencyConstant',
        request_serializer=google_dot_ads_dot_googleads__v4_dot_proto_dot_services_dot_currency__constant__service__pb2.GetCurrencyConstantRequest.SerializeToString,
        response_deserializer=google_dot_ads_dot_googleads__v4_dot_proto_dot_resources_dot_currency__constant__pb2.CurrencyConstant.FromString,
        )
class CurrencyConstantServiceServicer(object):
  """Service to fetch currency constants.
  """
  def GetCurrencyConstant(self, request, context):
    """Returns the requested currency constant.
    """
    # Generated default: concrete servicers override this method. An
    # unimplemented call reports UNIMPLEMENTED to the client, then raises locally.
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_CurrencyConstantServiceServicer_to_server(servicer, server):
  # Map each RPC name to a handler wiring the servicer method to the protobuf
  # (de)serializers, then register the whole service on the server.
  rpc_method_handlers = {
      'GetCurrencyConstant': grpc.unary_unary_rpc_method_handler(
          servicer.GetCurrencyConstant,
          request_deserializer=google_dot_ads_dot_googleads__v4_dot_proto_dot_services_dot_currency__constant__service__pb2.GetCurrencyConstantRequest.FromString,
          response_serializer=google_dot_ads_dot_googleads__v4_dot_proto_dot_resources_dot_currency__constant__pb2.CurrencyConstant.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'google.ads.googleads.v4.services.CurrencyConstantService', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
| 45.291667 | 177 | 0.821527 |
b0a3185ae6f29332c9f55bf5143e594bffc569eb | 1,777 | py | Python | magnum/tests/unit/conductor/test_k8s_api.py | mjbrewer/testIndex | 420dc071d4240a89b6f266e8d2575cedb39bfea0 | [
"Apache-2.0"
] | null | null | null | magnum/tests/unit/conductor/test_k8s_api.py | mjbrewer/testIndex | 420dc071d4240a89b6f266e8d2575cedb39bfea0 | [
"Apache-2.0"
] | null | null | null | magnum/tests/unit/conductor/test_k8s_api.py | mjbrewer/testIndex | 420dc071d4240a89b6f266e8d2575cedb39bfea0 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Huawei Technologies Co.,LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mock import patch
from oslo_config import cfg
from magnum.conductor import k8s_api
from magnum import objects
from magnum.tests import base
class TestK8sAPI(base.TestCase):
    """Unit tests for the magnum.conductor.k8s_api helpers."""
    @patch('magnum.objects.Bay.get_by_uuid')
    def test_retrieve_k8s_api_endpoint(self, mock_bay_get_by_uuid):
        """_retrieve_k8s_api_endpoint builds '<protocol>://<api_address>' from the resource's bay."""
        expected_context = 'context'
        expected_api_address = 'api_address'
        expected_protocol = cfg.CONF.kubernetes.k8s_protocol
        # A Pod resource whose bay lookup is mocked to return our bay below.
        resource = objects.Pod({})
        resource.bay_uuid = 'bay_uuid'
        bay = objects.Bay({})
        bay.api_address = expected_api_address
        mock_bay_get_by_uuid.return_value = bay
        actual_api_endpoint = k8s_api.K8sAPI._retrieve_k8s_api_endpoint(
            expected_context, resource)
        self.assertEqual("%s://%s" % (expected_protocol,
                                      expected_api_address),
                         actual_api_endpoint)
    @patch('magnum.conductor.k8s_api.K8sAPI')
    def test_create_k8s_api(self, mock_k8s_api_cls):
        """create_k8s_api delegates to the K8sAPI constructor with (context, bay)."""
        context = 'context'
        bay = objects.Bay({})
        k8s_api.create_k8s_api(context, bay)
        mock_k8s_api_cls.assert_called_once_with(context, bay)
| 35.54 | 75 | 0.704558 |
2f057dbaccc4a2196e18558418de71ed80ae3896 | 769 | py | Python | rectangle.py | rokimaru/oop_geometric_figures | 180b588658ee1434a4061c571ebf2c64e99e2297 | [
"Apache-2.0"
] | null | null | null | rectangle.py | rokimaru/oop_geometric_figures | 180b588658ee1434a4061c571ebf2c64e99e2297 | [
"Apache-2.0"
] | null | null | null | rectangle.py | rokimaru/oop_geometric_figures | 180b588658ee1434a4061c571ebf2c64e99e2297 | [
"Apache-2.0"
] | null | null | null | from figure import BaseFigure
class Rectangle(BaseFigure):
    """Geometric figure "rectangle": sides, angle count, perimeter and area."""

    def __init__(self, side_a, side_b):
        """Initialize the rectangle from its two side lengths."""
        self.name = 'Rectangle'
        self.angles = 4
        self.side_a = side_a
        self.side_b = side_b
        self.perimeter = self.calculate_perimeter()
        self.area = self.calculate_area()

    def calculate_perimeter(self):
        """Return the perimeter of the rectangle."""
        return (self.side_a + self.side_b) * 2

    def calculate_area(self):
        """Return the area of the rectangle."""
        return self.side_a * self.side_b
| 27.464286 | 80 | 0.639792 |
5d187d8c34d66b68a74c763c4857aedbaf28dfb8 | 21,980 | py | Python | lib/JobBrowserBFF/JobBrowserBFFImpl.py | eapearson/kbase-skd-module-job-browser-bff | 426445f90569adac16632ef4921f174e51abd42f | [
"MIT"
] | null | null | null | lib/JobBrowserBFF/JobBrowserBFFImpl.py | eapearson/kbase-skd-module-job-browser-bff | 426445f90569adac16632ef4921f174e51abd42f | [
"MIT"
] | 6 | 2020-05-26T17:40:07.000Z | 2022-03-11T16:33:11.000Z | lib/JobBrowserBFF/JobBrowserBFFImpl.py | eapearson/kbase-skd-module-job-browser-bff | 426445f90569adac16632ef4921f174e51abd42f | [
"MIT"
] | 1 | 2020-05-26T17:12:59.000Z | 2020-05-26T17:12:59.000Z | # -*- coding: utf-8 -*-
# BEGIN_HEADER
import apsw
import logging
from JobBrowserBFF.Validation import Validation
from JobBrowserBFF.model.Model import Model
from JobBrowserBFF.definitions.Definitions import Definitions
from JobBrowserBFF.cache.AppCache import AppCache
from JobBrowserBFF.cache.UserProfileCache import UserProfileCache
from pathlib import Path
# END_HEADER
class JobBrowserBFF:
"""
Module Name:
JobBrowserBFF
Module Description:
A KBase module: JobBrowserBFF
"""
######## WARNING FOR GEVENT USERS ####### noqa
# Since asynchronous IO can lead to methods - even the same method -
# interrupting each other, you must be *very* careful when using global
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
VERSION = "0.0.1"
GIT_URL = ""
GIT_COMMIT_HASH = "0de05d2b9029adbdcdb546279cb82c09e16daa7f"
# BEGIN_CLASS_HEADER
# END_CLASS_HEADER
# config contains contents of config file in a hash or None if it couldn't
# be found
    def __init__(self, config):
        # BEGIN_CONSTRUCTOR
        # JSON-schema based validator used for config, params and results.
        self.validation = Validation(schema_dir="impl", load_schemas=True)
        # fix up the config because, as an INI file, everything is a string...
        config["default-timeout"] = int(config["default-timeout"])
        config["cache-refresh-interval"] = int(config["cache-refresh-interval"])
        config["cache-refresh-initial-delay"] = int(
            config["cache-refresh-initial-delay"]
        )
        # Validate only after the numeric fields have been coerced above.
        self.validation.validate_config(config)
        self.config = config
        self.shared_folder = config["scratch"]
        logging.basicConfig(
            format="%(created)s %(levelname)s: %(message)s", level=logging.INFO
        )
        self.definitions = Definitions(load=True)
        # Hook applied to every new apsw (SQLite) connection: switch to WAL
        # journaling and disable automatic checkpointing.
        def setwal(db):
            db.cursor().execute("pragma journal_mode=wal")
            # custom auto checkpoint interval (use zero to disable)
            db.wal_autocheckpoint(0)
        apsw.connection_hooks.append(setwal)
        # Set up cache directory
        Path(config["cache-directory"]).mkdir(parents=True, exist_ok=True)
        # The app cache can be populated upon load.
        # TODO: need a process to refresh the cache periodically
        # NOTE(review): upstream_timeout is presumably seconds — confirm against
        # the AppCache/UserProfileCache implementations.
        app_cache_path = config["cache-directory"] + "/app.db"
        app_cache = AppCache(
            path=app_cache_path,
            narrative_method_store_url=config["nms-url"],
            upstream_timeout=60,
        )
        app_cache.initialize()
        user_profile_cache_path = config["cache-directory"] + "/user_profile.db"
        user_profile_cache = UserProfileCache(
            path=user_profile_cache_path,
            user_profile_url=config["user-profile-url"],
            upstream_timeout=60,
        )
        user_profile_cache.initialize()
        # END_CONSTRUCTOR
        pass
    def get_jobs(self, ctx, params):
        """
        :param params: instance of type "GetJobsParams" (get_jobs Given a set
           of job ids, returns the job information for each job, in the same
           order as the ids were provided. As with other methods, this one
           takes an "admin" parameter which indicates whether the call is
           intended for administrator usage or not. If for administrator
           usage, the token provided in the call must be associated with an
           account with admin privileges for the upstream service. An error
           with code 50 is returned otherwise. Params: - job_ids: a list of
           job ids to look up and provide information about - admin: a
           boolean indicating whether the request is for a admin usage or not
           Returns: - jobs - list of JobStatus Throws: - 10 - Job not found:
           If the any of the given job ids are not found) -> structure:
           parameter "job_ids" of list of type "JobID" (A job id is a uuid),
           parameter "admin" of type "bool" (In kb_sdk boolean values are
           represented as integer 1 and 0)
        :returns: instance of type "GetJobsResult" -> structure: parameter
           "jobs" of list of type "JobInfo" -> structure: parameter "job_id"
           of type "JobID" (A job id is a uuid), parameter "owner" of type
           "User" -> structure: parameter "username" of type "username" (A
           KBase username), parameter "realname" of String, parameter "state"
           of type "JobState" (Superset of all fields used to represent job
           state See the TS typing and json-schema) -> structure: parameter
           "status" of type "JobStatus" (create | queue | run | complete |
           error | terminate), parameter "create_at" of type "epoch_time"
           (Time represented as epoch time in milliseconds), parameter
           "queue_at" of type "epoch_time" (Time represented as epoch time in
           milliseconds), parameter "run_at" of type "epoch_time" (Time
           represented as epoch time in milliseconds), parameter "finish_at"
           of type "epoch_time" (Time represented as epoch time in
           milliseconds), parameter "client_group" of type "ClientGroup"
           (njs, bigmem, bigmemlong, kb_import, ...), parameter "error" of
           type "JobError" -> structure: parameter "code" of type
           "JobErrorCode", parameter "message" of String, parameter
           "service_error" of type "JSONRPC11Error" -> structure: parameter
           "code" of Long, parameter "message" of String, parameter "error"
           of unspecified object, parameter "termination" of type
           "JobTermination" -> structure: parameter "code" of type
           "JobTerminationCode", parameter "message" of String, parameter
           "app" of type "AppInfo" -> structure: parameter "module_name" of
           String, parameter "function_name" of String, parameter "title" of
           String, parameter "client_groups" of list of String, parameter
           "context" of type "JobContext" (The JobContext represents the
           context in which the Job was run. The `type` field Every job is
           run with some context. A) -> structure: parameter "type" of type
           "JobContextType" (narrative, export, workspace, unknown),
           parameter "workspace" of type "WorkspaceInfo" (Information about
           the workspace the job is associated with. Most, but not all, jobs
           are associated with a workspace. Note that only minimal
           information is exposed here, since this is all the the job browser
           requires. The design philosopy of this module is minimal support
           of the associated ui component.) -> structure: parameter "id" of
           Long, parameter "is_accessible" of type "bool" (In kb_sdk boolean
           values are represented as integer 1 and 0), parameter "name" of
           String, parameter "is_deleted" of type "bool" (In kb_sdk boolean
           values are represented as integer 1 and 0), parameter "narrative"
           of type "NarrativeInfo" (Information about the narrative with
           which the job is associated, if the workspace it is associated
           with is also a Narrative. Note that only minimal information is
           available at this time, since this is all that is required of a
           job browser. Future enhancments of a job browser may require
           additional fields here.) -> structure: parameter "title" of
           String, parameter "is_temporary" of type "bool" (In kb_sdk boolean
           values are represented as integer 1 and 0)
        """
        # ctx is the context object
        # return variables are: result
        # BEGIN get_jobs
        # Validate the incoming params against this method's JSON schema.
        self.validation.validate_params("get_jobs", params)
        # NOTE(review): assumes "timeout" is always present in params —
        # presumably guaranteed by validate_params; confirm against the schema.
        model = Model(
            config=self.config, context=ctx, timeout=params["timeout"]
        ).get_model(ctx)
        jobs, stats = model.get_jobs(params)
        result = {"jobs": jobs, "stats": stats}
        # Validate the response shape before handing it back to the SDK layer.
        self.validation.validate_result("get_jobs", result)
        return result
        # END get_jobs
def query_jobs(self, ctx, params):
"""
:param params: instance of type "QueryJobsParams" (TODO: expand to
match the filtering, sorting, searching of kb_metrics) ->
structure: parameter "jobs" of list of type "JobID" (A job id is a
uuid), parameter "sort" of list of type "SortSpec" -> structure:
parameter "key" of type "SortKey" (behaves as an enum: narrative,
app, submitted, status), parameter "direction" of type
"SortDirection" (behaves as an enum: ascending, descending),
parameter "search" of type "SearchSpec" -> structure: parameter
"terms" of list of String, parameter "filter" of type "FilterSpec"
-> structure: parameter "workspace_id" of list of Long, parameter
"status" of list of String, parameter "username" of list of
String, parameter "app_id" of list of String, parameter "job_id"
of list of String, parameter "error_code" of list of Long,
parameter "terminated_code" of list of Long, parameter "time_span"
of type "TimeSpanSpec" -> structure: parameter "from" of type
"epoch_time" (Time represented as epoch time in milliseconds),
parameter "to" of type "epoch_time" (Time represented as epoch
time in milliseconds), parameter "client_groups" of list of type
"ClientGroup" (njs, bigmem, bigmemlong, kb_import, ...), parameter
"offset" of Long, parameter "limit" of Long, parameter "admin" of
type "bool" (In kb_sdk boolean values are represented as integer 1
and 0)
:returns: instance of type "QueryJobsResult" -> structure: parameter
"jobs" of list of type "JobInfo" -> structure: parameter "job_id"
of type "JobID" (A job id is a uuid), parameter "owner" of type
"User" -> structure: parameter "username" of type "username" (A
KBase username), parameter "realname" of String, parameter "state"
of type "JobState" (Superset of all fields used to represent job
state See the TS typing and json-schema) -> structure: parameter
"status" of type "JobStatus" (create | queue | run | complete |
error | terminate), parameter "create_at" of type "epoch_time"
(Time represented as epoch time in milliseconds), parameter
"queue_at" of type "epoch_time" (Time represented as epoch time in
milliseconds), parameter "run_at" of type "epoch_time" (Time
represented as epoch time in milliseconds), parameter "finish_at"
of type "epoch_time" (Time represented as epoch time in
milliseconds), parameter "client_group" of type "ClientGroup"
(njs, bigmem, bigmemlong, kb_import, ...), parameter "error" of
type "JobError" -> structure: parameter "code" of type
"JobErrorCode", parameter "message" of String, parameter
"service_error" of type "JSONRPC11Error" -> structure: parameter
"code" of Long, parameter "message" of String, parameter "error"
of unspecified object, parameter "termination" of type
"JobTermination" -> structure: parameter "code" of type
"JobTerminationCode", parameter "message" of String, parameter
"app" of type "AppInfo" -> structure: parameter "module_name" of
String, parameter "function_name" of String, parameter "title" of
String, parameter "client_groups" of list of String, parameter
"context" of type "JobContext" (The JobContext represents the
context in which the Job was run. The `type` field Every job is
run with some context. A) -> structure: parameter "type" of type
"JobContextType" (narrative, export, workspace, unknown),
parameter "workspace" of type "WorkspaceInfo" (Information about
the workspace the job is associated with. Most, but not all, jobs
are associated with a workspace. Note that only minimal
information is exposed here, since this is all the the job browser
requires. The design philosopy of this module is minimal support
of the associated ui component.) -> structure: parameter "id" of
Long, parameter "is_accessible" of type "bool" (In kb_sdk boolean
values are represented as integer 1 and 0), parameter "name" of
String, parameter "is_deleted" of type "bool" (In kb_sdk boolean
values are represented as integer 1 and 0), parameter "narrative"
of type "NarrativeInfo" (Information about the narrative with
which the job is associated, if the workspace it is associated
with is also a Narrative. Note that only minimal information is
available at this time, since this is all that is required of a
job browser. Future enhancments of a job browser may require
additional fields here.) -> structure: parameter "title" of
String, parameter "is_temporary" of type "bool" (In kb_sdk boolean
values are represented as integer 1 and 0), parameter
"found_count" of Long, parameter "total_count" of Long
"""
# ctx is the context object
# return variables are: result
# BEGIN query_jobs
self.validation.validate_params("query_jobs", params)
model = Model(self.config, ctx).get_model(ctx)
jobs, found_count, total_count, stats = model.query_jobs(params)
result = {
"jobs": jobs,
"found_count": found_count,
"total_count": total_count,
"stats": stats,
}
self.validation.validate_result("query_jobs", result)
return result
# END query_jobs
def get_job_log(self, ctx, params):
"""
:param params: instance of type "GetJobLogParams" -> structure:
parameter "job_id" of type "JobID" (A job id is a uuid), parameter
"search" of type "SearchSpec" -> structure: parameter "terms" of
list of String, parameter "level" of list of type "LogLevel"
(enum-like: default, error), parameter "offset" of Long, parameter
"limit" of Long, parameter "admin" of type "bool" (In kb_sdk
boolean values are represented as integer 1 and 0)
:returns: instance of type "GetJobLogResult" -> structure: parameter
"log" of list of type "LogEntry" -> structure: parameter
"entry_number" of Long, parameter "created" of Long, parameter
"entry" of String, parameter "level" of type "LogLevel"
(enum-like: default, error), parameter "total_count" of Long
"""
# ctx is the context object
# return variables are: result
# BEGIN get_job_log
self.validation.validate_params("get_job_log", params)
model = Model(
config=self.config, context=ctx, timeout=params["timeout"]
).get_model(ctx)
result = model.get_job_log(params)
self.validation.validate_result("get_job_log", result)
return result
# END get_job_log
def cancel_job(self, ctx, params):
"""
:param params: instance of type "CancelJobParams" (cancel_job Given a
job id, attempt to cancel the associated job. Params: - job_id:
The id for the job to cancel Returns: - nothing. Throws: - 10 -
Job not found: If the given job id was not found Note that
attempting to cancel a job which is not cancelable will not throw
an error. This behavior may change in the future. At present one
upstream service (njsw) ignores this condition, but another (ee2)
returns an error. For ee2 that error is ignored.) -> structure:
parameter "job_id" of type "JobID" (A job id is a uuid), parameter
"admin" of type "bool" (In kb_sdk boolean values are represented
as integer 1 and 0), parameter "timeout" of Long
:returns: instance of type "CancelJobResult" -> structure: parameter
"canceled" of type "bool" (In kb_sdk boolean values are
represented as integer 1 and 0)
"""
# ctx is the context object
# return variables are: result
# BEGIN cancel_job
self.validation.validate_params("cancel_job", params)
model = Model(
config=self.config, context=ctx, timeout=params["timeout"]
).get_model(ctx)
result = model.cancel_job(params)
self.validation.validate_result("cancel_job", result)
return result
# END cancel_job
def get_job_types(self, ctx):
"""
:returns: instance of type "GetJobTypesResult" (********* *
get_job_types *********) -> structure: parameter
"job_type_definitions" of list of type "DomainDefinition" ->
structure: parameter "code" of String, parameter "description" of
String, parameter "notes" of String
"""
# ctx is the context object
# return variables are: result
# BEGIN get_job_types
# No params to validate!
d = self.definitions.get("job_types")
result = {"job_types": d}
self.validation.validate_result("get_job_types", result)
return result
# END get_job_types
def get_job_states(self, ctx):
"""
:returns: instance of type "GetJobStatesResult" (********* *
get_job_states *********) -> structure: parameter "job_states" of
list of type "DomainDefinition" -> structure: parameter "code" of
String, parameter "description" of String, parameter "notes" of
String
"""
# ctx is the context object
# return variables are: result
# BEGIN get_job_states
d = self.definitions.get("job_states")
result = {"job_states": d}
self.validation.validate_result("get_job_states", result)
return result
# END get_job_states
def get_client_groups(self, ctx):
"""
:returns: instance of type "GetClientGroupsResult" (********* *
get_client_groups *********) -> structure: parameter
"client_groups" of list of type "ClientGroup" (njs, bigmem,
bigmemlong, kb_import, ...)
"""
# ctx is the context object
# return variables are: result
# BEGIN get_client_groups
model = Model(self.config, ctx).get_model(ctx)
result = model.get_client_groups()
self.validation.validate_result("get_client_groups", result)
return result
# END get_client_groups
def get_searchable_job_fields(self, ctx):
"""
:returns: instance of type "GetSearchableJobFieldsResult" (*********
* get_searchable_job_fields *********) -> structure: parameter
"searchable_job_fields" of list of type "DomainDefinition" ->
structure: parameter "code" of String, parameter "description" of
String, parameter "notes" of String
"""
# ctx is the context object
# return variables are: result
# BEGIN get_searchable_job_fields
d = self.definitions.get("searchable_job_fields")
result = {"searchable_job_fields": d}
self.validation.validate_result("get_searchable_job_fields", result)
return result
# END get_searchable_job_fields
def get_sort_specs(self, ctx):
"""
:returns: instance of type "GetSortSpecsResult" -> structure:
parameter "sort_fields" of list of type "SortSpecDefinition"
(********* * get_sort_keys *********) -> structure: parameter
"key" of String, parameter "fields" of list of String, parameter
"description" of String
"""
# ctx is the context object
# return variables are: result
# BEGIN get_sort_specs
d = self.definitions.get("sort_specs")
result = {"sort_specs": d}
self.validation.validate_result("get_sort_specs", result)
return result
# END get_sort_specs
def get_log_levels(self, ctx):
"""
:returns: instance of type "GetLogLevelsResult" (********* *
get_log_levels *********) -> structure: parameter "log_levels" of
list of type "OrderedDomainDefinition" -> structure: parameter
"code" of String, parameter "order" of Long, parameter
"description" of String, parameter "notes" of String
"""
# ctx is the context object
# return variables are: result
# BEGIN get_log_levels
d = self.definitions.get("log_levels")
result = {"log_levels": d}
self.validation.validate_result("get_log_levels", result)
return result
# END get_log_levels
def is_admin(self, ctx):
"""
:returns: instance of type "IsAdminResult" (********* * is_admin
*********) -> structure: parameter "is_admin" of type "bool" (In
kb_sdk boolean values are represented as integer 1 and 0)
"""
# ctx is the context object
# return variables are: result
# BEGIN is_admin
model = Model(self.config, ctx).get_model(ctx)
is_admin = model.is_admin()
result = {"is_admin": is_admin}
self.validation.validate_result("is_admin", result)
return result
# END is_admin
def status(self, ctx):
# BEGIN_STATUS
returnVal = {
"state": "OK",
"message": "",
"version": self.VERSION,
"git_url": self.GIT_URL,
"git_commit_hash": self.GIT_COMMIT_HASH,
}
return returnVal
# END_STATUS
| 48.414097 | 80 | 0.631938 |
fefa1a8f8b42e5cfcd4daade726378deffea990a | 51,430 | py | Python | ghidra_9.0/Ghidra/Features/Python/data/jython-2.7.1/Lib/compiler/transformer.py | ChristopherMorrison/ghidra | e53aa57d1aff79a1df93728f746705c58fe95ab0 | [
"Apache-2.0"
] | 577 | 2020-06-04T16:34:44.000Z | 2022-03-31T11:46:07.000Z | ghidra_9.0/Ghidra/Features/Python/data/jython-2.7.1/Lib/compiler/transformer.py | ChristopherMorrison/ghidra | e53aa57d1aff79a1df93728f746705c58fe95ab0 | [
"Apache-2.0"
] | 174 | 2015-01-08T20:37:09.000Z | 2020-06-03T16:48:59.000Z | ghidra_9.0/Ghidra/Features/Python/data/jython-2.7.1/Lib/compiler/transformer.py | ChristopherMorrison/ghidra | e53aa57d1aff79a1df93728f746705c58fe95ab0 | [
"Apache-2.0"
] | 162 | 2015-02-07T02:14:38.000Z | 2020-05-30T16:42:03.000Z | """Parse tree transformation module.
Transforms Python source code into an abstract syntax tree (AST)
defined in the ast module.
The simplest ways to invoke this module are via parse and parseFile.
parse(buf) -> AST
parseFile(path) -> AST
"""
# Original version written by Greg Stein (gstein@lyra.org)
# and Bill Tutt (rassilon@lima.mudlib.org)
# February 1997.
#
# Modifications and improvements for Python 2.0 by Jeremy Hylton and
# Mark Hammond
#
# Some fixes to try to have correct line number on almost all nodes
# (except Module, Discard and Stmt) added by Sylvain Thenault
#
# Portions of this file are:
# Copyright (C) 1997-1998 Greg Stein. All Rights Reserved.
#
# This module is provided under a BSD-ish license. See
# http://www.opensource.org/licenses/bsd-license.html
# and replace OWNER, ORGANIZATION, and YEAR as appropriate.
from compiler.ast import *
import symbol
import token
import sys
if not sys.platform.startswith('java'):
import parser
# Raised when the transformer meets a parse-tree node it cannot handle.
# (StandardError is Python 2 only; this module targets Jython/CPython 2.x.)
class WalkerError(StandardError):
    pass
from compiler.consts import CO_VARARGS, CO_VARKEYWORDS
from compiler.consts import OP_ASSIGN, OP_DELETE, OP_APPLY
def parseFile(path):
    """Parse the Python source file at *path* and return its AST.

    The file is opened in universal-newline mode.  A trailing newline is
    appended because the string parser API, unlike the file API, requires
    one.
    """
    f = open(path, "U")
    try:
        # XXX The parser API tolerates files without a trailing newline,
        # but not strings without a trailing newline. Always add an extra
        # newline to the file contents, since we're going through the string
        # version of the API.
        src = f.read() + "\n"
    finally:
        # Close even when read() raises, so the descriptor is not leaked
        # (the original closed the file only on the success path).
        f.close()
    return parse(src)
def parse(buf, mode="exec"):
    """Parse source text *buf* and return an AST.

    *mode* selects the grammar entry point: "exec" and "single" are
    parsed as a suite, "eval" as a single expression.
    """
    if mode in ("exec", "single"):
        return Transformer().parsesuite(buf)
    if mode == "eval":
        return Transformer().parseexpr(buf)
    raise ValueError("compile() arg 3 must be"
                     " 'exec' or 'eval' or 'single'")
def asList(nodes):
    """Recursively convert *nodes* into a plain (possibly nested) list.

    Items providing an ``asList`` method are converted via that method;
    tuples and lists are converted recursively (tuples stay tuples);
    every other item passes through unchanged.
    """
    result = []
    for item in nodes:
        if hasattr(item, "asList"):
            result.append(item.asList())
        # Exact type checks on purpose (matching the original
        # `type(item) is type((None, None))` idiom, just spelled
        # readably): subclasses must not be unwrapped.
        elif type(item) is tuple:
            result.append(tuple(asList(item)))
        elif type(item) is list:
            result.append(asList(item))
        else:
            result.append(item)
    return result
def extractLineNo(ast):
    """Return the first line number found in parse tree *ast*, or None.

    Terminal nodes are ``(token, text, lineno)``; non-terminals hold
    child tuples, which are searched depth-first.
    """
    if not isinstance(ast[1], tuple):
        # Terminal node: the third element is the line number.
        return ast[2]
    for child in ast[1:]:
        if not isinstance(child, tuple):
            continue
        lineno = extractLineNo(child)
        if lineno is not None:
            return lineno
def Node(*args):
    """Instantiate the compiler.ast node class named by args[0].

    The remaining args are passed to the node's constructor.  Raises
    WalkerError for an unknown node kind.  (Python 2 syntax: ``print``
    statement and ``raise X, msg``.)
    """
    kind = args[0]
    # `nodes` is the kind -> node-class registry exported by compiler.ast.
    if nodes.has_key(kind):
        try:
            return nodes[kind](*args[1:])
        except TypeError:
            # Debugging aid: show which constructor rejected the arguments
            # before re-raising.
            print nodes[kind], len(args), args
            raise
    else:
        raise WalkerError, "Can't find appropriate Node type: %s" % str(args)
    #return apply(ast.Node, args)
class Transformer:
"""Utility object for transforming Python parse trees.
Exposes the following methods:
tree = transform(ast_tree)
tree = parsesuite(text)
tree = parseexpr(text)
tree = parsefile(fileob | filename)
"""
    def __init__(self):
        # Build a dispatch table mapping grammar symbol numbers to the
        # identically-named handler methods on this instance.
        self._dispatch = {}
        for value, name in symbol.sym_name.items():
            if hasattr(self, name):
                self._dispatch[value] = getattr(self, name)
        self._dispatch[token.NEWLINE] = self.com_NEWLINE
        # Atom nodes are dispatched on their first token instead.
        self._atom_dispatch = {token.LPAR: self.atom_lpar,
                               token.LSQB: self.atom_lsqb,
                               token.LBRACE: self.atom_lbrace,
                               token.BACKQUOTE: self.atom_backquote,
                               token.NUMBER: self.atom_number,
                               token.STRING: self.atom_string,
                               token.NAME: self.atom_name,
                               }
        # Source encoding; set by compile_node when the parse tree starts
        # with an encoding_decl node.
        self.encoding = None
    def transform(self, tree):
        """Transform an AST into a modified parse tree."""
        if not (isinstance(tree, tuple) or isinstance(tree, list)):
            tree = parser.ast2tuple(tree, line_info=1)
        return self.compile_node(tree)
    def parsesuite(self, text):
        """Return a modified parse tree for the given suite text."""
        return self.transform(parser.suite(text))
    def parseexpr(self, text):
        """Return a modified parse tree for the given expression text."""
        return self.transform(parser.expr(text))
    def parsefile(self, file):
        """Return a modified parse tree for the contents of the given file."""
        if type(file) == type(''):
            file = open(file)
        return self.parsesuite(file.read())
# --------------------------------------------------------------
#
# PRIVATE METHODS
#
    def compile_node(self, node):
        # Top-level dispatcher: strip an optional encoding declaration,
        # then route on the grammar start symbol.
        ### emit a line-number node?
        n = node[0]
        if n == symbol.encoding_decl:
            self.encoding = node[2]
            node = node[1]
            n = node[0]
        if n == symbol.single_input:
            return self.single_input(node[1:])
        if n == symbol.file_input:
            return self.file_input(node[1:])
        if n == symbol.eval_input:
            return self.eval_input(node[1:])
        if n == symbol.lambdef:
            return self.lambdef(node[1:])
        if n == symbol.funcdef:
            return self.funcdef(node[1:])
        if n == symbol.classdef:
            return self.classdef(node[1:])
        raise WalkerError, ('unexpected node type', n)
    def single_input(self, node):
        ### do we want to do anything about being "interactive" ?
        # NEWLINE | simple_stmt | compound_stmt NEWLINE
        n = node[0][0]
        if n != token.NEWLINE:
            return self.com_stmt(node[0])
        return Pass()
    def file_input(self, nodelist):
        # Whole-module input: optional docstring, then statements.
        doc = self.get_docstring(nodelist, symbol.file_input)
        if doc is not None:
            i = 1
        else:
            i = 0
        stmts = []
        for node in nodelist[i:]:
            # Skip the ENDMARKER and bare NEWLINE tokens.
            if node[0] != token.ENDMARKER and node[0] != token.NEWLINE:
                self.com_append_stmt(stmts, node)
        return Module(doc, Stmt(stmts))
    def eval_input(self, nodelist):
        # from the built-in function input()
        ### is this sufficient?
        return Expression(self.com_node(nodelist[0]))
    def decorator_name(self, nodelist):
        # Build a Name/Getattr chain for a dotted decorator name:
        # NAME ('.' NAME)*
        listlen = len(nodelist)
        assert listlen >= 1 and listlen % 2 == 1
        item = self.atom_name(nodelist)
        i = 1
        while i < listlen:
            assert nodelist[i][0] == token.DOT
            assert nodelist[i + 1][0] == token.NAME
            item = Getattr(item, nodelist[i + 1][1])
            i += 2
        return item
    def decorator(self, nodelist):
        # '@' dotted_name [ '(' [arglist] ')' ]
        assert len(nodelist) in (3, 5, 6)
        assert nodelist[0][0] == token.AT
        assert nodelist[-1][0] == token.NEWLINE
        assert nodelist[1][0] == symbol.dotted_name
        funcname = self.decorator_name(nodelist[1][1:])
        if len(nodelist) > 3:
            # Decorator with a call: @name(...)
            assert nodelist[2][0] == token.LPAR
            expr = self.com_call_function(funcname, nodelist[3])
        else:
            expr = funcname
        return expr
    def decorators(self, nodelist):
        # decorators: decorator ([NEWLINE] decorator)* NEWLINE
        items = []
        for dec_nodelist in nodelist:
            assert dec_nodelist[0] == symbol.decorator
            items.append(self.decorator(dec_nodelist[1:]))
        return Decorators(items)
    def funcdef(self, nodelist):
        #                    -6   -5    -4         -3  -2    -1
        # funcdef: [decorators] 'def' NAME parameters ':' suite
        # parameters: '(' [varargslist] ')'
        if len(nodelist) == 6:
            assert nodelist[0][0] == symbol.decorators
            decorators = self.decorators(nodelist[0][1:])
        else:
            assert len(nodelist) == 5
            decorators = None
        # Negative indexing keeps the offsets valid whether or not the
        # optional decorators node is present.
        lineno = nodelist[-4][2]
        name = nodelist[-4][1]
        args = nodelist[-3][2]
        if args[0] == symbol.varargslist:
            names, defaults, flags = self.com_arglist(args[1:])
        else:
            names = defaults = ()
            flags = 0
        doc = self.get_docstring(nodelist[-1])
        # code for function
        code = self.com_node(nodelist[-1])
        if doc is not None:
            # The docstring is carried on the Function node, so remove its
            # Discard statement from the body.
            assert isinstance(code, Stmt)
            assert isinstance(code.nodes[0], Discard)
            del code.nodes[0]
        return Function(decorators, name, names, defaults, flags, doc, code,
                        lineno=lineno)
    def lambdef(self, nodelist):
        # lambdef: 'lambda' [varargslist] ':' test
        if nodelist[2][0] == symbol.varargslist:
            names, defaults, flags = self.com_arglist(nodelist[2][1:])
        else:
            names = defaults = ()
            flags = 0
        # code for lambda
        code = self.com_node(nodelist[-1])
        return Lambda(names, defaults, flags, code, lineno=nodelist[1][2])
    # Grammar alias kept for the old (pre-conditional-expression) grammar.
    old_lambdef = lambdef
    def classdef(self, nodelist):
        # classdef: 'class' NAME ['(' [testlist] ')'] ':' suite
        name = nodelist[1][1]
        doc = self.get_docstring(nodelist[-1])
        if nodelist[2][0] == token.COLON:
            bases = []
        elif nodelist[3][0] == token.RPAR:
            bases = []
        else:
            bases = self.com_bases(nodelist[3])
        # code for class
        code = self.com_node(nodelist[-1])
        if doc is not None:
            # As for funcdef: the docstring lives on the Class node itself.
            assert isinstance(code, Stmt)
            assert isinstance(code.nodes[0], Discard)
            del code.nodes[0]
        return Class(name, bases, doc, code, lineno=nodelist[1][2])
    def stmt(self, nodelist):
        # Generic statement wrappers: delegate to the concrete handler.
        return self.com_stmt(nodelist[0])
    small_stmt = stmt
    flow_stmt = stmt
    compound_stmt = stmt
    def simple_stmt(self, nodelist):
        # small_stmt (';' small_stmt)* [';'] NEWLINE
        stmts = []
        for i in range(0, len(nodelist), 2):
            self.com_append_stmt(stmts, nodelist[i])
        return Stmt(stmts)
    # The following grammar nodes are never dispatched directly; they are
    # consumed by their parents' com_* helpers, so reaching one of these
    # handlers indicates a walker bug.
    def parameters(self, nodelist):
        raise WalkerError
    def varargslist(self, nodelist):
        raise WalkerError
    def fpdef(self, nodelist):
        raise WalkerError
    def fplist(self, nodelist):
        raise WalkerError
    def dotted_name(self, nodelist):
        raise WalkerError
    def comp_op(self, nodelist):
        raise WalkerError
    def trailer(self, nodelist):
        raise WalkerError
    def sliceop(self, nodelist):
        raise WalkerError
    def argument(self, nodelist):
        raise WalkerError
# --------------------------------------------------------------
#
# STATEMENT NODES (invoked by com_node())
#
    def expr_stmt(self, nodelist):
        # augassign testlist | testlist ('=' testlist)*
        en = nodelist[-1]
        exprNode = self.lookup_node(en)(en[1:])
        if len(nodelist) == 1:
            # Bare expression used as a statement.
            return Discard(exprNode, lineno=exprNode.lineno)
        if nodelist[1][0] == token.EQUAL:
            # Plain (possibly chained) assignment: a = b = expr.
            nodesl = []
            for i in range(0, len(nodelist) - 2, 2):
                nodesl.append(self.com_assign(nodelist[i], OP_ASSIGN))
            return Assign(nodesl, exprNode, lineno=nodelist[1][2])
        else:
            # Augmented assignment: a += expr etc.
            lval = self.com_augassign(nodelist[0])
            op = self.com_augassign_op(nodelist[1])
            return AugAssign(lval, op[1], exprNode, lineno=op[2])
        raise WalkerError, "can't get here"
    def print_stmt(self, nodelist):
        # print ([ test (',' test)* [','] ] | '>>' test [ (',' test)+ [','] ])
        items = []
        if len(nodelist) == 1:
            start = 1
            dest = None
        elif nodelist[1][0] == token.RIGHTSHIFT:
            # Extended form: print >> dest, ...
            assert len(nodelist) == 3 \
                   or nodelist[3][0] == token.COMMA
            dest = self.com_node(nodelist[2])
            start = 4
        else:
            dest = None
            start = 1
        for i in range(start, len(nodelist), 2):
            items.append(self.com_node(nodelist[i]))
        if nodelist[-1][0] == token.COMMA:
            # Trailing comma suppresses the newline -> Print, not Printnl.
            return Print(items, dest, lineno=nodelist[0][2])
        return Printnl(items, dest, lineno=nodelist[0][2])
    def del_stmt(self, nodelist):
        return self.com_assign(nodelist[1], OP_DELETE)
    def pass_stmt(self, nodelist):
        return Pass(lineno=nodelist[0][2])
    def break_stmt(self, nodelist):
        return Break(lineno=nodelist[0][2])
    def continue_stmt(self, nodelist):
        return Continue(lineno=nodelist[0][2])
    def return_stmt(self, nodelist):
        # return: [testlist]
        if len(nodelist) < 2:
            # Bare `return` returns None.
            return Return(Const(None), lineno=nodelist[0][2])
        return Return(self.com_node(nodelist[1]), lineno=nodelist[0][2])
    def yield_stmt(self, nodelist):
        # A yield used as a statement: wrap the expression in Discard.
        expr = self.com_node(nodelist[0])
        return Discard(expr, lineno=expr.lineno)
    def yield_expr(self, nodelist):
        if len(nodelist) > 1:
            value = self.com_node(nodelist[1])
        else:
            # Bare `yield` yields None.
            value = Const(None)
        return Yield(value, lineno=nodelist[0][2])
    def raise_stmt(self, nodelist):
        # raise: [test [',' test [',' test]]]
        if len(nodelist) > 5:
            expr3 = self.com_node(nodelist[5])
        else:
            expr3 = None
        if len(nodelist) > 3:
            expr2 = self.com_node(nodelist[3])
        else:
            expr2 = None
        if len(nodelist) > 1:
            expr1 = self.com_node(nodelist[1])
        else:
            expr1 = None
        return Raise(expr1, expr2, expr3, lineno=nodelist[0][2])
    def import_stmt(self, nodelist):
        # import_stmt: import_name | import_from
        assert len(nodelist) == 1
        return self.com_node(nodelist[0])
    def import_name(self, nodelist):
        # import_name: 'import' dotted_as_names
        return Import(self.com_dotted_as_names(nodelist[1]),
                      lineno=nodelist[0][2])
    def import_from(self, nodelist):
        # import_from: 'from' ('.'* dotted_name | '.') 'import' ('*' |
        #    '(' import_as_names ')' | import_as_names)
        assert nodelist[0][1] == 'from'
        idx = 1
        # Count the leading dots to compute the relative-import level.
        while nodelist[idx][1] == '.':
            idx += 1
        level = idx - 1
        if nodelist[idx][0] == symbol.dotted_name:
            fromname = self.com_dotted_name(nodelist[idx])
            idx += 1
        else:
            fromname = ""
        assert nodelist[idx][1] == 'import'
        if nodelist[idx + 1][0] == token.STAR:
            return From(fromname, [('*', None)], level,
                        lineno=nodelist[0][2])
        else:
            # Skip the optional parenthesis around the name list.
            node = nodelist[idx + 1 + (nodelist[idx + 1][0] == token.LPAR)]
            return From(fromname, self.com_import_as_names(node), level,
                        lineno=nodelist[0][2])
    def global_stmt(self, nodelist):
        # global: NAME (',' NAME)*
        names = []
        for i in range(1, len(nodelist), 2):
            names.append(nodelist[i][1])
        return Global(names, lineno=nodelist[0][2])
    def exec_stmt(self, nodelist):
        # exec_stmt: 'exec' expr ['in' expr [',' expr]]
        expr1 = self.com_node(nodelist[1])
        if len(nodelist) >= 4:
            expr2 = self.com_node(nodelist[3])
            if len(nodelist) >= 6:
                expr3 = self.com_node(nodelist[5])
            else:
                expr3 = None
        else:
            expr2 = expr3 = None
        return Exec(expr1, expr2, expr3, lineno=nodelist[0][2])
    def assert_stmt(self, nodelist):
        # 'assert': test, [',' test]
        expr1 = self.com_node(nodelist[1])
        if (len(nodelist) == 4):
            expr2 = self.com_node(nodelist[3])
        else:
            expr2 = None
        return Assert(expr1, expr2, lineno=nodelist[0][2])
    def if_stmt(self, nodelist):
        # if: test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
        tests = []
        # Each (el)if clause occupies 4 child nodes: keyword test ':' suite.
        for i in range(0, len(nodelist) - 3, 4):
            testNode = self.com_node(nodelist[i + 1])
            suiteNode = self.com_node(nodelist[i + 3])
            tests.append((testNode, suiteNode))
        if len(nodelist) % 4 == 3:
            # A trailing 'else' ':' suite group is present.
            elseNode = self.com_node(nodelist[-1])
##      elseNode.lineno = nodelist[-1][1][2]
        else:
            elseNode = None
        return If(tests, elseNode, lineno=nodelist[0][2])
    def while_stmt(self, nodelist):
        # 'while' test ':' suite ['else' ':' suite]
        testNode = self.com_node(nodelist[1])
        bodyNode = self.com_node(nodelist[3])
        if len(nodelist) > 4:
            elseNode = self.com_node(nodelist[6])
        else:
            elseNode = None
        return While(testNode, bodyNode, elseNode, lineno=nodelist[0][2])
    def for_stmt(self, nodelist):
        # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
        assignNode = self.com_assign(nodelist[1], OP_ASSIGN)
        listNode = self.com_node(nodelist[3])
        bodyNode = self.com_node(nodelist[5])
        if len(nodelist) > 8:
            elseNode = self.com_node(nodelist[8])
        else:
            elseNode = None
        return For(assignNode, listNode, bodyNode, elseNode,
                   lineno=nodelist[0][2])
    def try_stmt(self, nodelist):
        return self.com_try_except_finally(nodelist)
    def with_stmt(self, nodelist):
        return self.com_with(nodelist)
    def with_var(self, nodelist):
        return self.com_with_var(nodelist)
    def suite(self, nodelist):
        # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
        if len(nodelist) == 1:
            return self.com_stmt(nodelist[0])
        stmts = []
        for node in nodelist:
            if node[0] == symbol.stmt:
                self.com_append_stmt(stmts, node)
        return Stmt(stmts)
# --------------------------------------------------------------
#
# EXPRESSION NODES (invoked by com_node())
#
    def testlist(self, nodelist):
        # testlist: expr (',' expr)* [',']
        # testlist_safe: test [(',' test)+ [',']]
        # exprlist: expr (',' expr)* [',']
        return self.com_binary(Tuple, nodelist)
    testlist_safe = testlist # XXX
    testlist1 = testlist
    exprlist = testlist
    def testlist_gexp(self, nodelist):
        # A single test followed by gen_for is a generator expression.
        if len(nodelist) == 2 and nodelist[1][0] == symbol.gen_for:
            test = self.com_node(nodelist[0])
            return self.com_generator_expression(test, nodelist[1])
        return self.testlist(nodelist)
    def test(self, nodelist):
        # or_test ['if' or_test 'else' test] | lambdef
        if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
            return self.lambdef(nodelist[0])
        then = self.com_node(nodelist[0])
        if len(nodelist) > 1:
            # Conditional expression: then if test else else_.
            assert len(nodelist) == 5
            assert nodelist[1][1] == 'if'
            assert nodelist[3][1] == 'else'
            test = self.com_node(nodelist[2])
            else_ = self.com_node(nodelist[4])
            return IfExp(test, then, else_, lineno=nodelist[1][2])
        return then
    def or_test(self, nodelist):
        # and_test ('or' and_test)* | lambdef
        if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
            return self.lambdef(nodelist[0])
        return self.com_binary(Or, nodelist)
    # Alias for the old grammar's `test` production.
    old_test = or_test
    def and_test(self, nodelist):
        # not_test ('and' not_test)*
        return self.com_binary(And, nodelist)
    def not_test(self, nodelist):
        # 'not' not_test | comparison
        result = self.com_node(nodelist[-1])
        if len(nodelist) == 2:
            return Not(result, lineno=nodelist[0][2])
        return result
    def comparison(self, nodelist):
        # comparison: expr (comp_op expr)*
        node = self.com_node(nodelist[0])
        if len(nodelist) == 1:
            return node
        results = []
        for i in range(2, len(nodelist), 2):
            nl = nodelist[i-1]
            # comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
            #          | 'in' | 'not' 'in' | 'is' | 'is' 'not'
            n = nl[1]
            if n[0] == token.NAME:
                # NB: `type` deliberately shadows the builtin here
                # (historical code); it holds the operator string.
                type = n[1]
                if len(nl) == 3:
                    if type == 'not':
                        type = 'not in'
                    else:
                        type = 'is not'
            else:
                type = _cmp_types[n[0]]
            lineno = nl[1][2]
            results.append((type, self.com_node(nodelist[i])))
        # we need a special "compare" node so that we can distinguish
        #   3 < x < 5   from    (3 < x) < 5
        # the two have very different semantics and results (note that the
        # latter form is always true)
        return Compare(node, results, lineno=lineno)
    def expr(self, nodelist):
        # xor_expr ('|' xor_expr)*
        return self.com_binary(Bitor, nodelist)
    def xor_expr(self, nodelist):
        # xor_expr ('^' xor_expr)*
        return self.com_binary(Bitxor, nodelist)
    def and_expr(self, nodelist):
        # xor_expr ('&' xor_expr)*
        return self.com_binary(Bitand, nodelist)
    def shift_expr(self, nodelist):
        # shift_expr ('<<'|'>>' shift_expr)*
        node = self.com_node(nodelist[0])
        for i in range(2, len(nodelist), 2):
            right = self.com_node(nodelist[i])
            if nodelist[i-1][0] == token.LEFTSHIFT:
                node = LeftShift([node, right], lineno=nodelist[1][2])
            elif nodelist[i-1][0] == token.RIGHTSHIFT:
                node = RightShift([node, right], lineno=nodelist[1][2])
            else:
                raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
        return node
    def arith_expr(self, nodelist):
        # term (('+'|'-') term)*  -- left-associative chain.
        node = self.com_node(nodelist[0])
        for i in range(2, len(nodelist), 2):
            right = self.com_node(nodelist[i])
            if nodelist[i-1][0] == token.PLUS:
                node = Add([node, right], lineno=nodelist[1][2])
            elif nodelist[i-1][0] == token.MINUS:
                node = Sub([node, right], lineno=nodelist[1][2])
            else:
                raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
        return node
    def term(self, nodelist):
        # factor (('*'|'/'|'%'|'//') factor)*  -- left-associative chain.
        node = self.com_node(nodelist[0])
        for i in range(2, len(nodelist), 2):
            right = self.com_node(nodelist[i])
            t = nodelist[i-1][0]
            if t == token.STAR:
                node = Mul([node, right])
            elif t == token.SLASH:
                node = Div([node, right])
            elif t == token.PERCENT:
                node = Mod([node, right])
            elif t == token.DOUBLESLASH:
                node = FloorDiv([node, right])
            else:
                raise ValueError, "unexpected token: %s" % t
            node.lineno = nodelist[1][2]
        return node
    def factor(self, nodelist):
        # ('+'|'-'|'~') factor | power
        elt = nodelist[0]
        t = elt[0]
        node = self.lookup_node(nodelist[-1])(nodelist[-1][1:])
        # need to handle (unary op)constant here...
        if t == token.PLUS:
            return UnaryAdd(node, lineno=elt[2])
        elif t == token.MINUS:
            return UnarySub(node, lineno=elt[2])
        elif t == token.TILDE:
            node = Invert(node, lineno=elt[2])
        return node
    def power(self, nodelist):
        # power: atom trailer* ('**' factor)*
        node = self.com_node(nodelist[0])
        for i in range(1, len(nodelist)):
            elt = nodelist[i]
            if elt[0] == token.DOUBLESTAR:
                return Power([node, self.com_node(nodelist[i+1])],
                             lineno=elt[2])
            node = self.com_apply_trailer(node, elt)
        return node
    def atom(self, nodelist):
        # Dispatch on the atom's first token (paren/bracket/brace/etc.).
        return self._atom_dispatch[nodelist[0][0]](nodelist)
    def atom_lpar(self, nodelist):
        if nodelist[1][0] == token.RPAR:
            # Empty parentheses: the empty tuple.
            return Tuple((), lineno=nodelist[0][2])
        return self.com_node(nodelist[1])
    def atom_lsqb(self, nodelist):
        if nodelist[1][0] == token.RSQB:
            return List((), lineno=nodelist[0][2])
        return self.com_list_constructor(nodelist[1])
    def atom_lbrace(self, nodelist):
        if nodelist[1][0] == token.RBRACE:
            return Dict((), lineno=nodelist[0][2])
        return self.com_dictmaker(nodelist[1])
    def atom_backquote(self, nodelist):
        # Python 2 `expr` repr syntax.
        return Backquote(self.com_node(nodelist[1]))
    def atom_number(self, nodelist):
        ### need to verify this matches compile.c
        k = eval(nodelist[0][1])
        return Const(k, lineno=nodelist[0][2])
    def decode_literal(self, lit):
        if self.encoding:
            # this is particularly fragile & a bit of a
            # hack... changes in compile.c:parsestr and
            # tokenizer.c must be reflected here.
            if self.encoding not in ['utf-8', 'iso-8859-1']:
                lit = unicode(lit, 'utf-8').encode(self.encoding)
            return eval("# coding: %s\n%s" % (self.encoding, lit))
        else:
            return eval(lit)
    def atom_string(self, nodelist):
        # Adjacent string literals are concatenated.
        k = ''
        for node in nodelist:
            k += self.decode_literal(node[1])
        return Const(k, lineno=nodelist[0][2])
    def atom_name(self, nodelist):
        return Name(nodelist[0][1], lineno=nodelist[0][2])
# --------------------------------------------------------------
#
# INTERNAL PARSING UTILITIES
#
# The use of com_node() introduces a lot of extra stack frames,
# enough to cause a stack overflow compiling test.test_parser with
# the standard interpreter recursionlimit. The com_node() is a
# convenience function that hides the dispatch details, but comes
# at a very high cost. It is more efficient to dispatch directly
# in the callers. In these cases, use lookup_node() and call the
# dispatched node directly.
def lookup_node(self, node):
return self._dispatch[node[0]]
def com_node(self, node):
# Note: compile.c has handling in com_node for del_stmt, pass_stmt,
# break_stmt, stmt, small_stmt, flow_stmt, simple_stmt,
# and compound_stmt.
# We'll just dispatch them.
return self._dispatch[node[0]](node[1:])
def com_NEWLINE(self, *args):
# A ';' at the end of a line can make a NEWLINE token appear
# here, Render it harmless. (genc discards ('discard',
# ('const', xxxx)) Nodes)
return Discard(Const(None))
def com_arglist(self, nodelist):
# varargslist:
# (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME)
# | fpdef ['=' test] (',' fpdef ['=' test])* [',']
# fpdef: NAME | '(' fplist ')'
# fplist: fpdef (',' fpdef)* [',']
names = []
defaults = []
flags = 0
i = 0
while i < len(nodelist):
node = nodelist[i]
if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
if node[0] == token.STAR:
node = nodelist[i+1]
if node[0] == token.NAME:
names.append(node[1])
flags = flags | CO_VARARGS
i = i + 3
if i < len(nodelist):
# should be DOUBLESTAR
t = nodelist[i][0]
if t == token.DOUBLESTAR:
node = nodelist[i+1]
else:
raise ValueError, "unexpected token: %s" % t
names.append(node[1])
flags = flags | CO_VARKEYWORDS
break
# fpdef: NAME | '(' fplist ')'
names.append(self.com_fpdef(node))
i = i + 1
if i < len(nodelist) and nodelist[i][0] == token.EQUAL:
defaults.append(self.com_node(nodelist[i + 1]))
i = i + 2
elif len(defaults):
# we have already seen an argument with default, but here
# came one without
raise SyntaxError, "non-default argument follows default argument"
# skip the comma
i = i + 1
return names, defaults, flags
    def com_fpdef(self, node):
        # A formal parameter is either a plain NAME or a parenthesized
        # tuple of sub-parameters (Python 2 tuple unpacking in signatures).
        # fpdef: NAME | '(' fplist ')'
        if node[1][0] == token.LPAR:
            return self.com_fplist(node[2])
        return node[1][1]
    def com_fplist(self, node):
        # Return a single name, or a tuple of (possibly nested) names.
        # fplist: fpdef (',' fpdef)* [',']
        if len(node) == 2:
            return self.com_fpdef(node[1])
        list = []
        for i in range(1, len(node), 2):
            list.append(self.com_fpdef(node[i]))
        return tuple(list)
    def com_dotted_name(self, node):
        # String together the dotted names and return the string
        # (child tuples with code 1 are NAME tokens).
        name = ""
        for n in node:
            if type(n) == type(()) and n[0] == 1:
                name = name + n[1] + '.'
        return name[:-1]
    def com_dotted_as_name(self, node):
        # 'import a.b.c [as x]' -> ('a.b.c', 'x' or None)
        assert node[0] == symbol.dotted_as_name
        node = node[1:]
        dot = self.com_dotted_name(node[0][1:])
        if len(node) == 1:
            return dot, None
        assert node[1][1] == 'as'
        assert node[2][0] == token.NAME
        return dot, node[2][1]
    def com_dotted_as_names(self, node):
        # Comma-separated list of dotted_as_name -> list of (name, alias).
        assert node[0] == symbol.dotted_as_names
        node = node[1:]
        names = [self.com_dotted_as_name(node[0])]
        for i in range(2, len(node), 2):
            names.append(self.com_dotted_as_name(node[i]))
        return names
    def com_import_as_name(self, node):
        # 'from m import a [as x]' item -> ('a', 'x' or None)
        assert node[0] == symbol.import_as_name
        node = node[1:]
        assert node[0][0] == token.NAME
        if len(node) == 1:
            return node[0][1], None
        assert node[1][1] == 'as', node
        assert node[2][0] == token.NAME
        return node[0][1], node[2][1]
    def com_import_as_names(self, node):
        # Comma-separated list of import_as_name -> list of (name, alias).
        assert node[0] == symbol.import_as_names
        node = node[1:]
        names = [self.com_import_as_name(node[0])]
        for i in range(2, len(node), 2):
            names.append(self.com_import_as_name(node[i]))
        return names
    def com_bases(self, node):
        # Base-class expressions of a classdef (every other child; the
        # children in between are commas).
        bases = []
        for i in range(1, len(node), 2):
            bases.append(self.com_node(node[i]))
        return bases
    def com_try_except_finally(self, nodelist):
        # Build TryExcept / TryFinally AST nodes from a try statement.
        # ('try' ':' suite
        #  ((except_clause ':' suite)+ ['else' ':' suite] ['finally' ':' suite]
        #   | 'finally' ':' suite))
        if nodelist[3][0] == token.NAME:
            # first clause is a finally clause: only try-finally
            return TryFinally(self.com_node(nodelist[2]),
                              self.com_node(nodelist[5]),
                              lineno=nodelist[0][2])
        #tryexcept:  [TryNode, [except_clauses], elseNode)]
        clauses = []
        elseNode = None
        finallyNode = None
        # each clause occupies 3 children: keyword, ':', suite
        for i in range(3, len(nodelist), 3):
            node = nodelist[i]
            if node[0] == symbol.except_clause:
                # except_clause: 'except' [expr [',' expr]] */
                if len(node) > 2:
                    expr1 = self.com_node(node[2])
                    if len(node) > 4:
                        # second expr is the target the exception is bound to
                        expr2 = self.com_assign(node[4], OP_ASSIGN)
                    else:
                        expr2 = None
                else:
                    expr1 = expr2 = None
                clauses.append((expr1, expr2, self.com_node(nodelist[i+2])))
            if node[0] == token.NAME:
                if node[1] == 'else':
                    elseNode = self.com_node(nodelist[i+2])
                elif node[1] == 'finally':
                    finallyNode = self.com_node(nodelist[i+2])
        try_except = TryExcept(self.com_node(nodelist[2]), clauses, elseNode,
                               lineno=nodelist[0][2])
        if finallyNode:
            # try/except/finally is represented as TryFinally(TryExcept(...))
            return TryFinally(try_except, finallyNode, lineno=nodelist[0][2])
        else:
            return try_except
    def com_with(self, nodelist):
        # with_stmt: 'with' expr [with_var] ':' suite
        expr = self.com_node(nodelist[1])
        body = self.com_node(nodelist[-1])
        if nodelist[2][0] == token.COLON:
            # no 'as <target>' part
            var = None
        else:
            var = self.com_assign(nodelist[2][2], OP_ASSIGN)
        return With(expr, var, body, lineno=nodelist[0][2])
    def com_with_var(self, nodelist):
        # with_var: 'as' expr
        return self.com_node(nodelist[1])
    def com_augassign_op(self, node):
        # Return the operator token ('+=', '-=', ...) of an augassign node.
        assert node[0] == symbol.augassign
        return node[1]
    def com_augassign(self, node):
        """Return node suitable for lvalue of augmented assignment
        Names, slices, and attributes are the only allowable nodes.
        """
        l = self.com_node(node)
        if l.__class__ in (Name, Slice, Subscript, Getattr):
            return l
        raise SyntaxError, "can't assign to %s" % l.__class__.__name__
    def com_assign(self, node, assigning):
        # return a node suitable for use as an "lvalue"
        # loop to avoid trivial recursion
        while 1:
            t = node[0]
            if t in (symbol.exprlist, symbol.testlist, symbol.testlist_safe, symbol.testlist_gexp):
                if len(node) > 2:
                    # more than one child: a tuple target like 'a, b = ...'
                    return self.com_assign_tuple(node, assigning)
                node = node[1]
            elif t in _assign_types:
                if len(node) > 2:
                    # an actual binary operation cannot be a target
                    raise SyntaxError, "can't assign to operator"
                node = node[1]
            elif t == symbol.power:
                if node[1][0] != symbol.atom:
                    raise SyntaxError, "can't assign to operator"
                if len(node) > 2:
                    # evaluate all trailers but the last; the last trailer
                    # (attribute or subscript) is the assignment target
                    primary = self.com_node(node[1])
                    for i in range(2, len(node)-1):
                        ch = node[i]
                        if ch[0] == token.DOUBLESTAR:
                            raise SyntaxError, "can't assign to operator"
                        primary = self.com_apply_trailer(primary, ch)
                    return self.com_assign_trailer(primary, node[-1],
                                                   assigning)
                node = node[1]
            elif t == symbol.atom:
                t = node[1][0]
                if t == token.LPAR:
                    node = node[2]
                    if node[0] == token.RPAR:
                        raise SyntaxError, "can't assign to ()"
                elif t == token.LSQB:
                    node = node[2]
                    if node[0] == token.RSQB:
                        raise SyntaxError, "can't assign to []"
                    return self.com_assign_list(node, assigning)
                elif t == token.NAME:
                    return self.com_assign_name(node[1], assigning)
                else:
                    raise SyntaxError, "can't assign to literal"
            else:
                raise SyntaxError, "bad assignment (%s)" % t
    def com_assign_tuple(self, node, assigning):
        # Tuple target: every other child is a sub-target (commas between).
        assigns = []
        for i in range(1, len(node), 2):
            assigns.append(self.com_assign(node[i], assigning))
        return AssTuple(assigns, lineno=extractLineNo(node))
    def com_assign_list(self, node, assigning):
        # List target '[a, b] = ...'; reject list comprehensions as targets.
        assigns = []
        for i in range(1, len(node), 2):
            if i + 1 < len(node):
                if node[i + 1][0] == symbol.list_for:
                    raise SyntaxError, "can't assign to list comprehension"
                assert node[i + 1][0] == token.COMMA, node[i + 1]
            assigns.append(self.com_assign(node[i], assigning))
        return AssList(assigns, lineno=extractLineNo(node))
    def com_assign_name(self, node, assigning):
        # Simple name target; 'assigning' is OP_ASSIGN / OP_DELETE etc.
        return AssName(node[1], assigning, lineno=node[2])
    def com_assign_trailer(self, primary, node, assigning):
        # The final trailer of a power node decides the target kind:
        # attribute (x.y), subscript (x[y]); a call can never be a target.
        t = node[1][0]
        if t == token.DOT:
            return self.com_assign_attr(primary, node[2], assigning)
        if t == token.LSQB:
            return self.com_subscriptlist(primary, node[2], assigning)
        if t == token.LPAR:
            raise SyntaxError, "can't assign to function call"
        raise SyntaxError, "unknown trailer type: %s" % t
    def com_assign_attr(self, primary, node, assigning):
        return AssAttr(primary, node[1], assigning, lineno=node[-1])
    def com_binary(self, constructor, nodelist):
        "Compile 'NODE (OP NODE)*' into (type, [ node1, ..., nodeN ])."
        l = len(nodelist)
        if l == 1:
            # single operand: no binary operation, just compile the child
            n = nodelist[0]
            return self.lookup_node(n)(n[1:])
        items = []
        for i in range(0, l, 2):
            n = nodelist[i]
            items.append(self.lookup_node(n)(n[1:]))
        return constructor(items, lineno=extractLineNo(nodelist))
    def com_stmt(self, node):
        # Compile a node and guarantee the result is wrapped in a Stmt.
        result = self.lookup_node(node)(node[1:])
        assert result is not None
        if isinstance(result, Stmt):
            return result
        return Stmt([result])
    def com_append_stmt(self, stmts, node):
        # Compile a node and append its statement(s) to the 'stmts' list,
        # flattening nested Stmt nodes.
        result = self.lookup_node(node)(node[1:])
        assert result is not None
        if isinstance(result, Stmt):
            stmts.extend(result.nodes)
        else:
            stmts.append(result)
    if hasattr(symbol, 'list_for'):
        # Grammar supports list comprehensions (Python >= 2.0 era grammar).
        def com_list_constructor(self, nodelist):
            # listmaker: test ( list_for | (',' test)* [','] )
            values = []
            for i in range(1, len(nodelist)):
                if nodelist[i][0] == symbol.list_for:
                    # a list_for must be the only remaining child, and the
                    # single value before it is the comprehension expression
                    assert len(nodelist[i:]) == 1
                    return self.com_list_comprehension(values[0],
                                                       nodelist[i])
                elif nodelist[i][0] == token.COMMA:
                    continue
                values.append(self.com_node(nodelist[i]))
            return List(values, lineno=values[0].lineno)
        def com_list_comprehension(self, expr, node):
            # Walk the chain of list_for / list_if clauses; each 'if'
            # attaches to the most recent 'for' (newfor).
            # list_iter: list_for | list_if
            # list_for: 'for' exprlist 'in' testlist [list_iter]
            # list_if: 'if' test [list_iter]
            # XXX should raise SyntaxError for assignment
            lineno = node[1][2]
            fors = []
            while node:
                t = node[1][1]
                if t == 'for':
                    assignNode = self.com_assign(node[2], OP_ASSIGN)
                    listNode = self.com_node(node[4])
                    newfor = ListCompFor(assignNode, listNode, [])
                    newfor.lineno = node[1][2]
                    fors.append(newfor)
                    if len(node) == 5:
                        node = None
                    else:
                        node = self.com_list_iter(node[5])
                elif t == 'if':
                    test = self.com_node(node[2])
                    newif = ListCompIf(test, lineno=node[1][2])
                    newfor.ifs.append(newif)
                    if len(node) == 3:
                        node = None
                    else:
                        node = self.com_list_iter(node[3])
                else:
                    raise SyntaxError, \
                          ("unexpected list comprehension element: %s %d"
                           % (node, lineno))
            return ListComp(expr, fors, lineno=lineno)
        def com_list_iter(self, node):
            assert node[0] == symbol.list_iter
            return node[1]
    else:
        # Older grammar: list displays only, no comprehensions.
        def com_list_constructor(self, nodelist):
            values = []
            for i in range(1, len(nodelist), 2):
                values.append(self.com_node(nodelist[i]))
            return List(values, lineno=values[0].lineno)
    if hasattr(symbol, 'gen_for'):
        # Grammar supports generator expressions (Python >= 2.4 era grammar).
        def com_generator_expression(self, expr, node):
            # Walk the chain of gen_for / gen_if clauses, analogous to
            # com_list_comprehension above.
            # gen_iter: gen_for | gen_if
            # gen_for: 'for' exprlist 'in' test [gen_iter]
            # gen_if: 'if' test [gen_iter]
            lineno = node[1][2]
            fors = []
            while node:
                t = node[1][1]
                if t == 'for':
                    assignNode = self.com_assign(node[2], OP_ASSIGN)
                    genNode = self.com_node(node[4])
                    newfor = GenExprFor(assignNode, genNode, [],
                                        lineno=node[1][2])
                    fors.append(newfor)
                    if (len(node)) == 5:
                        node = None
                    else:
                        node = self.com_gen_iter(node[5])
                elif t == 'if':
                    test = self.com_node(node[2])
                    newif = GenExprIf(test, lineno=node[1][2])
                    newfor.ifs.append(newif)
                    if len(node) == 3:
                        node = None
                    else:
                        node = self.com_gen_iter(node[3])
                else:
                    raise SyntaxError, \
                            ("unexpected generator expression element: %s %d"
                             % (node, lineno))
            # the outermost iterable is evaluated eagerly by the generator
            fors[0].is_outmost = True
            return GenExpr(GenExprInner(expr, fors), lineno=lineno)
        def com_gen_iter(self, node):
            assert node[0] == symbol.gen_iter
            return node[1]
    def com_dictmaker(self, nodelist):
        # Each key/value pair occupies 4 children: key ':' value ','.
        # dictmaker: test ':' test (',' test ':' value)* [',']
        items = []
        for i in range(1, len(nodelist), 4):
            items.append((self.com_node(nodelist[i]),
                          self.com_node(nodelist[i+2])))
        return Dict(items, lineno=items[0][0].lineno)
    def com_apply_trailer(self, primaryNode, nodelist):
        # trailer: '(' ... ')' (call) | '.' NAME (attribute) | '[' ... ']'
        # (subscript)
        t = nodelist[1][0]
        if t == token.LPAR:
            return self.com_call_function(primaryNode, nodelist[2])
        if t == token.DOT:
            return self.com_select_member(primaryNode, nodelist[2])
        if t == token.LSQB:
            return self.com_subscriptlist(primaryNode, nodelist[2], OP_APPLY)
        raise SyntaxError, 'unknown node type: %s' % t
    def com_select_member(self, primaryNode, nodelist):
        if nodelist[0] != token.NAME:
            raise SyntaxError, "member must be a name"
        return Getattr(primaryNode, nodelist[1], lineno=nodelist[2])
    def com_call_function(self, primaryNode, nodelist):
        # Build a CallFunc node: positional/keyword args first, then the
        # optional *args and **kwargs nodes at the end of the arglist.
        if nodelist[0] == token.RPAR:
            # empty argument list: f()
            return CallFunc(primaryNode, [], lineno=extractLineNo(nodelist))
        args = []
        kw = 0
        len_nodelist = len(nodelist)
        for i in range(1, len_nodelist, 2):
            node = nodelist[i]
            if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
                break
            kw, result = self.com_argument(node, kw)
            if len_nodelist != 2 and isinstance(result, GenExpr) \
                and len(node) == 3 and node[2][0] == symbol.gen_for:
                # allow f(x for x in y), but reject f(x for x in y, 1)
                # should use f((x for x in y), 1) instead of f(x for x in y, 1)
                raise SyntaxError, 'generator expression needs parenthesis'
            args.append(result)
        else:
            # No broken by star arg, so skip the last one we processed.
            i = i + 1
        if i < len_nodelist and nodelist[i][0] == token.COMMA:
            # need to accept an application that looks like "f(a, b,)"
            i = i + 1
        star_node = dstar_node = None
        while i < len_nodelist:
            # each star/double-star arg occupies 3 children: tok, expr, ','
            tok = nodelist[i]
            ch = nodelist[i+1]
            i = i + 3
            if tok[0]==token.STAR:
                if star_node is not None:
                    raise SyntaxError, 'already have the varargs indentifier'
                star_node = self.com_node(ch)
            elif tok[0]==token.DOUBLESTAR:
                if dstar_node is not None:
                    raise SyntaxError, 'already have the kwargs indentifier'
                dstar_node = self.com_node(ch)
            else:
                raise SyntaxError, 'unknown node type: %s' % tok
        return CallFunc(primaryNode, args, star_node, dstar_node,
                        lineno=extractLineNo(nodelist))
    def com_argument(self, nodelist, kw):
        # Compile one call argument. Returns (is_keyword, node); 'kw' tells
        # us whether a keyword argument has already been seen.
        if len(nodelist) == 3 and nodelist[2][0] == symbol.gen_for:
            # unparenthesized generator expression argument: f(x for x in y)
            test = self.com_node(nodelist[1])
            return 0, self.com_generator_expression(test, nodelist[2])
        if len(nodelist) == 2:
            # plain positional argument
            if kw:
                raise SyntaxError, "non-keyword arg after keyword arg"
            return 0, self.com_node(nodelist[1])
        # keyword argument: NAME '=' value; descend single-child chains to
        # find the NAME token
        result = self.com_node(nodelist[3])
        n = nodelist[1]
        while len(n) == 2 and n[0] != token.NAME:
            n = n[1]
        if n[0] != token.NAME:
            raise SyntaxError, "keyword can't be an expression (%s)"%n[0]
        node = Keyword(n[1], result, lineno=n[2])
        return 1, node
    def com_subscriptlist(self, primary, nodelist, assigning):
        # slicing:      simple_slicing | extended_slicing
        # simple_slicing:   primary "[" short_slice "]"
        # extended_slicing: primary "[" slice_list "]"
        # slice_list:   slice_item ("," slice_item)* [","]
        # backwards compat slice for '[i:j]'
        if len(nodelist) == 2:
            sub = nodelist[1]
            if (sub[1][0] == token.COLON or \
                (len(sub) > 2 and sub[2][0] == token.COLON)) and \
                sub[-1][0] != symbol.sliceop:
                return self.com_slice(primary, sub, assigning)
        subscripts = []
        for i in range(1, len(nodelist), 2):
            subscripts.append(self.com_subscript(nodelist[i]))
        return Subscript(primary, assigning, subscripts,
                         lineno=extractLineNo(nodelist))
    def com_subscript(self, node):
        # slice_item: expression | proper_slice | ellipsis
        ch = node[1]
        t = ch[0]
        if t == token.DOT and node[2][0] == token.DOT:
            return Ellipsis()
        if t == token.COLON or len(node) > 2:
            return self.com_sliceobj(node)
        return self.com_node(ch)
    def com_sliceobj(self, node):
        # Build a Sliceobj whose items are [lower, upper, (stride...)],
        # with Const(None) filling any omitted bound.
        # proper_slice: short_slice | long_slice
        # short_slice:  [lower_bound] ":" [upper_bound]
        # long_slice:   short_slice ":" [stride]
        # lower_bound:  expression
        # upper_bound:  expression
        # stride:       expression
        #
        # Note: a stride may be further slicing...
        items = []
        if node[1][0] == token.COLON:
            items.append(Const(None))
            i = 2
        else:
            items.append(self.com_node(node[1]))
            # i == 2 is a COLON
            i = 3
        if i < len(node) and node[i][0] == symbol.test:
            items.append(self.com_node(node[i]))
            i = i + 1
        else:
            items.append(Const(None))
        # a short_slice has been built. look for long_slice now by looking
        # for strides...
        for j in range(i, len(node)):
            ch = node[j]
            if len(ch) == 2:
                items.append(Const(None))
            else:
                items.append(self.com_node(ch[2]))
        return Sliceobj(items, lineno=extractLineNo(node))
    def com_slice(self, primary, node, assigning):
        # Simple two-bound slice, e.g. x[i:j]; either bound may be omitted.
        # short_slice: [lower_bound] ":" [upper_bound]
        lower = upper = None
        if len(node) == 3:
            if node[1][0] == token.COLON:
                upper = self.com_node(node[2])
            else:
                lower = self.com_node(node[1])
        elif len(node) == 4:
            lower = self.com_node(node[1])
            upper = self.com_node(node[3])
        return Slice(primary, assigning, lower, upper,
                     lineno=extractLineNo(node))
    def get_docstring(self, node, n=None):
        # Extract the docstring (a leading string literal) from a suite or
        # file_input subtree, or return None if there is none. 'n' is the
        # node's symbol number; by default it is taken from the node itself.
        if n is None:
            n = node[0]
            node = node[1:]
        if n == symbol.suite:
            if len(node) == 1:
                return self.get_docstring(node[0])
            for sub in node:
                if sub[0] == symbol.stmt:
                    return self.get_docstring(sub)
            return None
        if n == symbol.file_input:
            for sub in node:
                if sub[0] == symbol.stmt:
                    return self.get_docstring(sub)
            return None
        if n == symbol.atom:
            if node[0][0] == token.STRING:
                # adjacent string literals are concatenated; eval() decodes
                # the literal (quotes, escapes, prefixes)
                s = ''
                for t in node:
                    s = s + eval(t[1])
                return s
            return None
        if n == symbol.stmt or n == symbol.simple_stmt \
           or n == symbol.small_stmt:
            return self.get_docstring(node[0])
        if n in _doc_nodes and len(node) == 1:
            # single-child expression wrappers: keep descending
            return self.get_docstring(node[0])
        return None
# Symbol types that may wrap a docstring expression (single-child chains
# that get_docstring descends through).
_doc_nodes = [
    symbol.expr_stmt,
    symbol.testlist,
    symbol.testlist_safe,
    symbol.test,
    symbol.or_test,
    symbol.and_test,
    symbol.not_test,
    symbol.comparison,
    symbol.expr,
    symbol.xor_expr,
    symbol.and_expr,
    symbol.shift_expr,
    symbol.arith_expr,
    symbol.term,
    symbol.factor,
    symbol.power,
    ]
# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
#             | 'in' | 'not' 'in' | 'is' | 'is' 'not'
# Map comparison operator tokens to their canonical operator strings.
_cmp_types = {
    token.LESS : '<',
    token.GREATER : '>',
    token.EQEQUAL : '==',
    token.EQUAL : '==',
    token.LESSEQUAL : '<=',
    token.GREATEREQUAL : '>=',
    token.NOTEQUAL : '!=',
    }
# Symbol types that the transformer knows how to dispatch on.
_legal_node_types = [
    symbol.funcdef,
    symbol.classdef,
    symbol.stmt,
    symbol.small_stmt,
    symbol.flow_stmt,
    symbol.simple_stmt,
    symbol.compound_stmt,
    symbol.expr_stmt,
    symbol.print_stmt,
    symbol.del_stmt,
    symbol.pass_stmt,
    symbol.break_stmt,
    symbol.continue_stmt,
    symbol.return_stmt,
    symbol.raise_stmt,
    symbol.import_stmt,
    symbol.global_stmt,
    symbol.exec_stmt,
    symbol.assert_stmt,
    symbol.if_stmt,
    symbol.while_stmt,
    symbol.for_stmt,
    symbol.try_stmt,
    symbol.with_stmt,
    symbol.suite,
    symbol.testlist,
    symbol.testlist_safe,
    symbol.test,
    symbol.and_test,
    symbol.not_test,
    symbol.comparison,
    symbol.exprlist,
    symbol.expr,
    symbol.xor_expr,
    symbol.and_expr,
    symbol.shift_expr,
    symbol.arith_expr,
    symbol.term,
    symbol.factor,
    symbol.power,
    symbol.atom,
    ]
# 'yield' symbols only exist in newer grammars; add them conditionally.
if hasattr(symbol, 'yield_stmt'):
    _legal_node_types.append(symbol.yield_stmt)
if hasattr(symbol, 'yield_expr'):
    _legal_node_types.append(symbol.yield_expr)
# Symbol types that are transparent wrappers in assignment targets
# (see com_assign).
_assign_types = [
    symbol.test,
    symbol.or_test,
    symbol.and_test,
    symbol.not_test,
    symbol.comparison,
    symbol.expr,
    symbol.xor_expr,
    symbol.and_expr,
    symbol.shift_expr,
    symbol.arith_expr,
    symbol.term,
    symbol.factor,
    ]
# Combined number -> name map for both grammar symbols and tokens,
# used by debug_tree for human-readable output.
_names = {}
for k, v in symbol.sym_name.items():
    _names[k] = v
for k, v in token.tok_name.items():
    _names[k] = v
def debug_tree(tree):
    """Return a copy of *tree* with numeric symbol/token codes replaced by
    their human-readable names (unknown numbers are kept as-is)."""
    def _pretty(elt):
        if isinstance(elt, int):
            # grammar symbol / token number -> name via the combined map
            return _names.get(elt, elt)
        if isinstance(elt, str):
            # token text passes through unchanged
            return elt
        # nested subtree: recurse
        return debug_tree(elt)
    return [_pretty(elt) for elt in tree]
| 34.470509 | 99 | 0.530935 |
c70ff5ae80a08971f39c04d9258e598cdec06831 | 6,378 | py | Python | tests/test_blake3.py | xkortex/blake3-py | 2a96fd27ad4c9bbba0d1168eb1496a18381017ab | [
"Apache-2.0"
] | null | null | null | tests/test_blake3.py | xkortex/blake3-py | 2a96fd27ad4c9bbba0d1168eb1496a18381017ab | [
"Apache-2.0"
] | null | null | null | tests/test_blake3.py | xkortex/blake3-py | 2a96fd27ad4c9bbba0d1168eb1496a18381017ab | [
"Apache-2.0"
] | null | null | null | import array
from binascii import unhexlify
import json
import numpy
from pathlib import Path
import subprocess
import sys
try:
from blake3 import blake3
except ModuleNotFoundError:
print("Run tests/build.py first.", file=sys.stderr)
raise
# Directory containing this test file; used to locate fixture data.
HERE = Path(__file__).parent
# Official BLAKE3 test vectors: inputs, key, context, and expected digests.
VECTORS = json.load((HERE / "test_vectors.json").open())
def make_input(length):
    """Build the standard BLAKE3 test input: the byte sequence
    0, 1, ..., 250 repeating, truncated to *length* bytes."""
    return bytearray(i % 251 for i in range(length))
def test_vectors():
    # Verify the three BLAKE3 modes (hash, keyed hash, derive-key) against
    # the official test vectors, for both the default 32-byte digest and
    # the extended-output lengths, and for one-shot vs incremental updates.
    cases = VECTORS["cases"]
    for case in cases:
        input_len = int(case["input_len"])
        input_bytes = make_input(input_len)
        extended_hash_hex = case["hash"]
        extended_keyed_hash_hex = case["keyed_hash"]
        extended_derive_key_hex = case["derive_key"]
        extended_hash_bytes = unhexlify(extended_hash_hex)
        extended_keyed_hash_bytes = unhexlify(extended_keyed_hash_hex)
        extended_derive_key_bytes = unhexlify(extended_derive_key_hex)
        # the first 32 bytes of the extended output are the default digest
        hash_bytes = extended_hash_bytes[:32]
        keyed_hash_bytes = extended_keyed_hash_bytes[:32]
        derive_key_bytes = extended_derive_key_bytes[:32]
        extended_len = len(extended_hash_bytes)
        assert extended_len == len(extended_keyed_hash_bytes)
        assert extended_len == len(extended_derive_key_bytes)
        # default hash
        assert hash_bytes == blake3(input_bytes).digest()
        assert extended_hash_bytes == blake3(input_bytes).digest(
            length=extended_len)
        assert extended_hash_hex == blake3(input_bytes).hexdigest(
            length=extended_len)
        # feeding the input in two halves must give the same digest
        incremental_hash = blake3()
        incremental_hash.update(input_bytes[:input_len // 2])
        incremental_hash.update(input_bytes[input_len // 2:])
        assert hash_bytes == incremental_hash.digest()
        # keyed hash
        key = VECTORS["key"].encode()
        assert keyed_hash_bytes == blake3(input_bytes, key=key).digest()
        assert extended_keyed_hash_bytes == blake3(
            input_bytes, key=key).digest(length=extended_len)
        assert extended_keyed_hash_hex == blake3(
            input_bytes, key=key).hexdigest(length=extended_len)
        incremental_keyed_hash = blake3(key=key)
        incremental_keyed_hash.update(input_bytes[:input_len // 2])
        incremental_keyed_hash.update(input_bytes[input_len // 2:])
        assert keyed_hash_bytes == incremental_keyed_hash.digest()
        # derive key
        context = "BLAKE3 2019-12-27 16:29:52 test vectors context"
        assert derive_key_bytes == blake3(input_bytes,
                                          context=context).digest()
        assert extended_derive_key_bytes == blake3(
            input_bytes, context=context).digest(length=extended_len)
        assert extended_derive_key_hex == blake3(
            input_bytes, context=context).hexdigest(length=extended_len)
        incremental_derive_key = blake3(context=context)
        incremental_derive_key.update(input_bytes[:input_len // 2])
        incremental_derive_key.update(input_bytes[input_len // 2:])
        assert derive_key_bytes == incremental_derive_key.digest()
def test_buffer_types():
    # Any contiguous buffer of bytes-sized items must hash the same as the
    # equivalent bytes object, both one-shot and via update().
    expected = blake3(b"foo").digest()
    assert expected == blake3(memoryview(b"foo")).digest()
    assert expected == blake3(bytearray(b"foo")).digest()
    assert expected == blake3(memoryview(bytearray(b"foo"))).digest()
    # "B" means unsigned char. See https://docs.python.org/3/library/array.html.
    assert expected == blake3(array.array("B", b"foo")).digest()
    assert expected == blake3(memoryview(array.array("B", b"foo"))).digest()
    # "b" means (signed) char.
    assert expected == blake3(array.array("b", b"foo")).digest()
    assert expected == blake3(memoryview(array.array("b", b"foo"))).digest()
    # mixing buffer types across update() calls must also work
    incremental = blake3()
    incremental.update(b"one")
    incremental.update(memoryview(b"two"))
    incremental.update(bytearray(b"three"))
    incremental.update(memoryview(bytearray(b"four")))
    incremental.update(array.array("B", b"five"))
    incremental.update(memoryview(array.array("B", b"six")))
    incremental.update(array.array("b", b"seven"))
    incremental.update(memoryview(array.array("b", b"eight")))
    assert incremental.digest() == blake3(
        b"onetwothreefourfivesixseveneight").digest()
def test_int_array_fails():
try:
# "i" represents the int type, which is larger than a char.
blake3(array.array("i"))
except BufferError:
pass
else:
assert False, "expected a buffer error"
def test_strided_array_fails():
    # Non-contiguous (strided) buffers cannot be hashed; only C-contiguous
    # buffers are accepted.
    unstrided = numpy.array([1, 2, 3, 4], numpy.uint8)
    # view every second byte: shape (2,), stride 2 -> not contiguous
    strided = numpy.lib.stride_tricks.as_strided(unstrided,
                                                 shape=[2],
                                                 strides=[2])
    assert bytes(strided) == bytes([1, 3])
    # Unstrided works fine.
    blake3(unstrided)
    try:
        # But strided fails.
        blake3(strided)
    except BufferError:
        pass
    else:
        assert False, "expected a buffer error"
def test_string_fails():
    """Hashing a str (rather than bytes) must raise TypeError."""
    raised = False
    try:
        blake3("a string")
    except TypeError:
        raised = True
    assert raised, "expected a type error"
def test_constants():
    # The module-level length constants are fixed by the BLAKE3 spec.
    import blake3
    assert blake3.OUT_LEN == 32
    assert blake3.KEY_LEN == 32
def test_example_dot_py():
    # Run the example script end-to-end: it should hash stdin and print
    # the hex digest of b"hello world".
    hello_hash = \
        "d74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24"
    output = subprocess.run(
        [sys.executable, str(HERE / "example.py")],
        check=True,
        input=b"hello world",
        stdout=subprocess.PIPE).stdout.decode().strip()
    assert output == hello_hash
def test_xof():
    # Extended output must be consistent: any prefix equals a shorter
    # digest, and any suffix equals a seek()-ed digest.
    extended = blake3(b"foo").digest(length=100)
    for i in range(100):
        assert extended[:i] == blake3(b"foo").digest(length=i)
        assert extended[i:] == blake3(b"foo").digest(length=100 - i, seek=i)
def test_multithreading():
    # Multithreaded hashing must produce the same digest as single-threaded.
    b = make_input(10**6)
    expected = blake3(b).digest()
    assert expected == blake3(b, multithreading=True).digest()
    incremental = blake3()
    incremental.update(b, multithreading=True)
    assert expected == incremental.digest()
def test_key_context_incompatible():
    # key= (keyed hashing) and context= (key derivation) are mutually
    # exclusive modes.
    zero_key = bytearray(32)
    try:
        blake3(b"foo", key=zero_key, context="")
    except TypeError:
        pass
    else:
        assert False, "expected a type error"
| 34.475676 | 80 | 0.65773 |
a23769ad1636dac6b1a645ab8e2b01866a490245 | 12,038 | py | Python | ewah/operators/sql_base_operator.py | sdaltmann/ewah | 0615af0e1f1825a91258b79fdbf3c2e3a27cb4b8 | [
"MIT"
] | null | null | null | ewah/operators/sql_base_operator.py | sdaltmann/ewah | 0615af0e1f1825a91258b79fdbf3c2e3a27cb4b8 | [
"MIT"
] | null | null | null | ewah/operators/sql_base_operator.py | sdaltmann/ewah | 0615af0e1f1825a91258b79fdbf3c2e3a27cb4b8 | [
"MIT"
] | null | null | null | from ewah.operators.base_operator import EWAHBaseOperator
from ewah.ewah_utils.airflow_utils import airflow_datetime_adjustments
from ewah.constants import EWAHConstants as EC
from datetime import timedelta
from pytz import timezone
class EWAHSQLBaseOperator(EWAHBaseOperator):
    """Abstract base operator for extracting data from SQL databases.

    Subclasses (one per engine) must set ``self.sql_engine`` to one of the
    ``_MYSQL`` / ``_PGSQL`` / ``_ORACLE`` constants before calling
    ``super().__init__()``, and must provide the engine-specific SQL snippets
    (``_SQL_BASE``, ``_SQL_BASE_SELECT``, ``_SQL_COLUMN_QUOTE``,
    ``_SQL_PARAMS``, ``_SQL_MINMAX_CHUNKS``, ``_SQL_CHUNKING_CLAUSE``) plus
    a ``_get_data_from_sql(sql, params, return_dict)`` method.

    Supports full-refresh (drop and replace) and incremental loading, and
    can split the extraction into chunks either by a timestamp column
    (``chunking_interval`` is a ``timedelta``) or by an integer column such
    as the primary key (``chunking_interval`` is an ``int``).
    """

    template_fields = ('data_from', 'data_until', 'reload_data_from')

    # implemented SQL sources - set self.sql_engine to this value in operator
    _MYSQL = 'MySQL'
    _PGSQL = 'PostgreSQL'
    _ORACLE = 'OracleSQL'

    _IS_INCREMENTAL = True
    _IS_FULL_REFRESH = True

    def __init__(self,
        source_schema_name=None, # string
        source_table_name=None, # string, defaults to same as target_table_name
        sql_select_statement=None, # SQL as alternative to source_table_name
        data_from=None, # datetime, ISO datetime string, or airflow JINJA macro
        data_until=None, # datetime, ISO datetime string, or airflow JINJA macro
        use_execution_date_for_incremental_loading=False, # use context instead
        #   of data_from and data_until
        timestamp_column=None, # name of column to increment and/or chunk by
        chunking_interval=None, # can be datetime.timedelta or integer
        chunking_column=None, # defaults to primary key if integer
        # also potentially used: primary_key_column_name of parent operator
        reload_data_from=None, # If a new table is added in production, and
        # it is loading incrementally, where to start loading data? datetime
        reload_data_chunking=None, # must be timedelta
        where_clause='1 = 1',
    *args, **kwargs):

        # allow setting schema in general config w/out throwing an error:
        # a custom SELECT makes the schema setting irrelevant
        if sql_select_statement:
            source_schema_name = None

        target_table_name = kwargs.get('target_table_name')

        if not hasattr(self, 'sql_engine'):
            raise Exception('Operator invalid: need attribute sql_engine!')

        # --- argument validation ------------------------------------------
        if reload_data_from and not (reload_data_chunking or chunking_interval):
            raise Exception('When setting reload_data_from, must also set ' \
                + 'either reload_data_chunking or chunking_interval!')

        if data_from or data_until or \
            use_execution_date_for_incremental_loading:
            if not timestamp_column:
                raise Exception("If you used data_from and/or data_until, you" \
                    + " must also use timestamp_column to specify the column" \
                    + " that is being used!")

        if chunking_interval:
            if isinstance(chunking_interval, timedelta):
                # timedelta chunking iterates over a timestamp column
                if not timestamp_column:
                    raise Exception("If chunking via timedelta, must supply" \
                        + " a timestamp_column, even if not loading " \
                        + "incrementally!")
            elif isinstance(chunking_interval, int):
                # integer chunking iterates over an integer column; default
                # to the (single) primary key if no column was given
                if not chunking_column \
                    and not kwargs.get('primary_key_column_name'):
                    # Check columns for primary key - if exactly one,
                    #   use it. Otherwise, raise error.
                    error_msg = "If chunking via integer, must supply " \
                        + "primary_key_column_name OR have EXACTLY ONE " \
                        + "primary key defined in the columns_definition dict" \
                        + "! This is not the case for: {0}. {{0}}" \
                        .format(source_table_name)
                    if not kwargs.get('columns_definition'):
                        raise Exception(error_msg.format(
                            'You did not supply the columns_definition dict.'
                        ))
                    columns = kwargs.get('columns_definition')
                    if not (sum([
                        1 if columns[key].get(EC.QBC_FIELD_PK) else 0
                        for key in columns.keys()
                    ]) == 1):
                        raise Exception(error_msg.format(
                            'There is not exactly one primary key in the dict.'
                        ))
                    for key in columns.keys():
                        if columns[key].get(EC.QBC_FIELD_PK):
                            kwargs['primary_key_column_name'] = key
                            break
                chunking_column = chunking_column \
                    or kwargs['primary_key_column_name']
            else:
                raise Exception("Arg chunking_interval must be integer or "\
                    + "datetime.timedelta!")

        self.data_from = data_from
        self.data_until = data_until
        self.use_execution_date_for_incremental_loading = \
            use_execution_date_for_incremental_loading
        self.timestamp_column = timestamp_column
        self.chunking_interval = chunking_interval
        self.chunking_column = chunking_column
        self.reload_data_from = reload_data_from
        self.reload_data_chunking = reload_data_chunking or chunking_interval

        # run after setting class properties for templating
        super().__init__(*args, **kwargs)

        # --- build the base SQL -------------------------------------------
        # self.base_select is a SELECT statement (i.e. a string) ending in a
        #   WHERE {0} -> the extract process can add conditions!
        # self.base_sql is a pure SELECT statement ready to be executed
        if sql_select_statement:
            err_msg = 'sql_select_statement and {0} cannot' \
                ' be used in combination!'
            if not where_clause == '1 = 1':
                raise Exception(err_msg.format('where_clause'))
            if source_table_name:
                raise Exception(err_msg.format('source_table_name'))
            self.base_sql = sql_select_statement
        else:
            source_table_name = source_table_name or target_table_name
            if self.columns_definition:
                # select only the defined columns, properly quoted
                columns_sql = (
                    '\t'
                    + self._SQL_COLUMN_QUOTE
                    + (
                        '{0}\n,\t{0}'
                        .format(self._SQL_COLUMN_QUOTE)
                        .join(self.columns_definition.keys())
                    )
                    + self._SQL_COLUMN_QUOTE
                )
            else:
                columns_sql = '\t*'
            self.base_sql = self._SQL_BASE.format(**{
                'columns': columns_sql,
                'schema': source_schema_name,
                'table': source_table_name,
                'where_clause': where_clause,
            })

        self.base_select = self._SQL_BASE_SELECT.format(**{
            'select_sql': self.base_sql,
        })

    def ewah_execute(self, context):
        """Extract the data (full refresh or incremental, optionally in
        chunks) and upload it via ``self.upload_data()``."""
        str_format = '%Y-%m-%dT%H:%M:%SZ'

        # --- determine the data interval ----------------------------------
        if not self.drop_and_replace and \
            self.use_execution_date_for_incremental_loading:
            self.data_from = context['execution_date']
            self.data_until = context['next_execution_date']
        else:
            # normalize datetimes / ISO strings / airflow JINJA macros
            self.data_from = airflow_datetime_adjustments(self.data_from)
            self.data_until = airflow_datetime_adjustments(self.data_until)

        self.reload_data_from = \
            airflow_datetime_adjustments(self.reload_data_from)

        if self.drop_and_replace:
            self.log.info('Loading data as full refresh.')
        else:
            self.data_from = self.data_from or context['execution_date']
            n_e_d = context['next_execution_date']
            self.data_until = self.data_until or n_e_d
            if not self.test_if_target_table_exists():
                # first load of a new target table: backfill from
                # reload_data_from (or the DAG start date) in chunks
                self.chunking_interval = self.reload_data_chunking \
                    or self.chunking_interval \
                    or (self.data_until - self.data_from)
                self.data_from = self.reload_data_from \
                    or context['dag'].start_date
                if isinstance(self.data_from, str):
                    # defensive: the original called an undefined
                    # datetime_from_string() here (NameError); normalize
                    # strings the same way as the other date arguments
                    self.data_from = \
                        airflow_datetime_adjustments(self.data_from)
            self.log.info('Incrementally loading data from {0} to {1}.'.format(
                self.data_from.strftime(str_format),
                self.data_until.strftime(str_format),
            ))

        # --- build the parameterized WHERE clause -------------------------
        params = {}
        if self.drop_and_replace:
            # optional bounds for a (partial) full refresh
            sql_base = self.base_select
            if self.data_from:
                sql_base = sql_base.format('{0} >= {1} AND {{0}}'.format(
                    self.timestamp_column,
                    self._SQL_PARAMS.format('data_from'),
                ))
                params.update({'data_from': self.data_from})
            if self.data_until:
                sql_base = sql_base.format('{0} <= {1} AND {{0}}'.format(
                    self.timestamp_column,
                    self._SQL_PARAMS.format('data_until'),
                ))
                # bugfix: previously bound self.data_from to 'data_until'
                params.update({'data_until': self.data_until})
            sql_base = sql_base.format('1 = 1 {0}')
        else:
            # incremental: half-open interval [data_from, data_until)
            sql_base = self.base_select.format('{0} >= {1} AND {0} < {2} {{0}}'
                .format(
                    self.timestamp_column,
                    self._SQL_PARAMS.format('data_from'),
                    self._SQL_PARAMS.format('data_until'),
                ))
            params.update({'data_from': self.data_from})
            params.update({'data_until': self.data_until})

        # --- execute, chunked or in one go --------------------------------
        if self.chunking_interval:
            if isinstance(self.chunking_interval, timedelta):
                chunking_column = self.timestamp_column or self.chunking_column
            else:
                chunking_column = self.chunking_column

            if self.drop_and_replace:
                # derive chunk bounds from the source's own MIN/MAX values,
                # clamped to any explicitly requested interval
                previous_chunk, max_chunk = self._get_data_from_sql(
                    sql=self._SQL_MINMAX_CHUNKS.format(**{
                        'column': chunking_column,
                        'base': sql_base.format(''),
                    }),
                    params=params,
                    return_dict=False,
                )[0]
                if chunking_column == self.timestamp_column:
                    # naive datetimes from the source are assumed UTC
                    tz = timezone('UTC')
                    if not previous_chunk.tzinfo:
                        previous_chunk = tz.localize(previous_chunk)
                    if self.data_from:
                        previous_chunk = max(previous_chunk, self.data_from)
                    if not max_chunk.tzinfo:
                        max_chunk = tz.localize(max_chunk)
                    if self.data_until:
                        max_chunk = min(max_chunk, self.data_until)
            else:
                previous_chunk = self.data_from
                max_chunk = self.data_until

            while previous_chunk <= max_chunk:
                params.update({
                    'from_value': previous_chunk,
                    'until_value': min(
                        max_chunk,
                        previous_chunk + self.chunking_interval,
                    ),
                })
                data = self._get_data_from_sql(
                    sql=sql_base.format(
                        self._SQL_CHUNKING_CLAUSE
                    ).format(**{
                        'column': chunking_column,
                        # include the upper bound only for the last chunk,
                        # so intermediate chunks stay half-open
                        'equal_sign': ('=' if max_chunk < (previous_chunk \
                            + self.chunking_interval) else ''),
                    }),
                    params=params,
                    return_dict=True,
                )
                self.upload_data(data=data)
                previous_chunk += self.chunking_interval
        else:
            self.upload_data(
                data=self._get_data_from_sql(
                    sql=sql_base.format('AND 1 = 1'),
                    return_dict=True,
                    params=params,
                ),
            )
| 44.420664 | 80 | 0.543695 |
24c8c6989230799a9ad444daa7991d06c351bde1 | 1,518 | py | Python | server.py | framon/cloudprint | de6595e3aea40a6b96c3efb4f7ba37ace2a61122 | [
"MIT"
] | null | null | null | server.py | framon/cloudprint | de6595e3aea40a6b96c3efb4f7ba37ace2a61122 | [
"MIT"
] | null | null | null | server.py | framon/cloudprint | de6595e3aea40a6b96c3efb4f7ba37ace2a61122 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# (c) Fábio Ramon Lima e Lima - 2012
# framon@monzeu.eti.br
import web
import subprocess
class Printer:
def __init__ (self, name):
self.name = name
self.location = ''
self.description = ''
class PrintersHandler:
def GET(self, slash):
if slash:
raise web.seeother('/printers')
print web.ctx.env.get('HTTP_ACCEPT')
template = web.template.frender('templates/printers.json')
output = template(web.ctx.home, printers.values())
web.header('Content-Type', output.content_type)
return output
class PrinterHandler:
    """GET /printers/<name>: render a single printer as JSON."""

    def GET(self, name):
        printer = printers[name]
        render = web.template.frender('templates/printer.json')
        body = render(web.ctx.home, printer)
        web.header('Content-Type', body.content_type)
        return body
class JobsHandler:
def POST(self, name):
printer = printers[name]
data = web.data()
print type(data)
print data[0:10]
fout = open('job.pdf', 'w')
fout.write(data)
fout.close()
subprocess.call(['lpr', '-H', 'printserver', '-P', name, 'job.pdf'])
template = web.template.frender('templates/printer.json')
output = template(web.ctx.home, printer)
web.header('Content-Type', output.content_type)
return output
paths = (
'/printers(/)?', 'PrintersHandler',
'/printers/([a-z0-9]{4})', 'PrinterHandler',
'/printers/([a-z0-9]{4})/jobs', 'JobsHandler',
)
app = web.application(paths, globals())
printers = {
'pt01' : Printer('pt01'),
'pt0e' : Printer('pt0e')
}
if __name__ == "__main__":
app.run()
| 19.461538 | 70 | 0.670619 |
b57d6e202f1b8b4f2bbd85ab8f3f18f4b8a2e449 | 8,428 | py | Python | adafruit_ht16k33/matrix.py | FoamyGuy/Adafruit_CircuitPython_HT16K33 | fc0fd184e96494f0f44b3baba726f951ce291b05 | [
"MIT"
] | null | null | null | adafruit_ht16k33/matrix.py | FoamyGuy/Adafruit_CircuitPython_HT16K33 | fc0fd184e96494f0f44b3baba726f951ce291b05 | [
"MIT"
] | null | null | null | adafruit_ht16k33/matrix.py | FoamyGuy/Adafruit_CircuitPython_HT16K33 | fc0fd184e96494f0f44b3baba726f951ce291b05 | [
"MIT"
] | null | null | null | # The MIT License (MIT)
#
# Copyright (c) 2016 Radomir Dopieralski & Tony DiCola for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Matrix Displays
================
"""
from adafruit_ht16k33.ht16k33 import HT16K33
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_HT16K33.git"
class Matrix8x8(HT16K33):
    """A single 8x8 LED matrix."""

    _columns = 8
    _rows = 8

    def pixel(self, x, y, color=None):
        """Get or set the color of a given pixel.

        Out-of-range coordinates are ignored (returns ``None``).
        """
        if not 0 <= x <= 7:
            return None
        if not 0 <= y <= 7:
            return None
        # Rotate the column index by one to match the backpack wiring --
        # preserved from the original; TODO confirm against hardware docs.
        x = (x - 1) % 8
        return super()._pixel(x, y, color)

    def __getitem__(self, key):
        # Support matrix[x, y] read access.
        x, y = key
        return self.pixel(x, y)

    def __setitem__(self, key, value):
        # Support matrix[x, y] = color write access.
        x, y = key
        self.pixel(x, y, value)

    # pylint: disable=too-many-branches
    def shift(self, x, y, rotate=False):
        """
        Shift pixels by x and y

        :param rotate: (Optional) Rotate the shifted pixels to the left side (default=False)
        """
        # Batch all the pixel writes and show() once at the end.
        auto_write = self.auto_write
        self._auto_write = False
        if x > 0:  # Shift Right
            for _ in range(x):
                for row in range(0, self.rows):
                    last_pixel = self[self.columns - 1, row] if rotate else 0
                    for col in range(self.columns - 1, 0, -1):
                        self[col, row] = self[col - 1, row]
                    self[0, row] = last_pixel
        elif x < 0:  # Shift Left
            for _ in range(-x):
                for row in range(0, self.rows):
                    last_pixel = self[0, row] if rotate else 0
                    for col in range(0, self.columns - 1):
                        self[col, row] = self[col + 1, row]
                    self[self.columns - 1, row] = last_pixel
        if y > 0:  # Shift Up
            for _ in range(y):
                for col in range(0, self.columns):
                    last_pixel = self[col, self.rows - 1] if rotate else 0
                    for row in range(self.rows - 1, 0, -1):
                        self[col, row] = self[col, row - 1]
                    self[col, 0] = last_pixel
        elif y < 0:  # Shift Down
            for _ in range(-y):
                for col in range(0, self.columns):
                    last_pixel = self[col, 0] if rotate else 0
                    for row in range(0, self.rows - 1):
                        self[col, row] = self[col, row + 1]
                    self[col, self.rows - 1] = last_pixel
        self._auto_write = auto_write
        if auto_write:
            self.show()
    # pylint: enable=too-many-branches

    def shift_right(self, rotate=False):
        """
        Shift all pixels right

        :param rotate: (Optional) Rotate the shifted pixels to the left side (default=False)
        """
        self.shift(1, 0, rotate)

    def shift_left(self, rotate=False):
        """
        Shift all pixels left

        :param rotate: (Optional) Rotate the shifted pixels to the right side (default=False)
        """
        self.shift(-1, 0, rotate)

    def shift_up(self, rotate=False):
        """
        Shift all pixels up

        :param rotate: (Optional) Rotate the shifted pixels to bottom (default=False)
        """
        self.shift(0, 1, rotate)

    def shift_down(self, rotate=False):
        """
        Shift all pixels down

        :param rotate: (Optional) Rotate the shifted pixels to top (default=False)
        """
        self.shift(0, -1, rotate)

    def image(self, img):
        """Set buffer to value of Python Imaging Library image. The image should
        be in 1 bit mode and a size equal to the display size."""
        imwidth, imheight = img.size
        if imwidth != self.columns or imheight != self.rows:
            raise ValueError('Image must be same dimensions as display ({0}x{1}).'
                             .format(self.columns, self.rows))
        # Grab all the pixels from the image, faster than getpixel.
        pixels = img.convert('1').load()
        # Iterate through the pixels
        for x in range(self.columns):   # yes this double loop is slow,
            for y in range(self.rows):  # but these displays are small!
                self.pixel(x, y, pixels[(x, y)])
        if self._auto_write:
            self.show()

    @property
    def columns(self):
        """Read-only property for number of columns"""
        return self._columns

    @property
    def rows(self):
        """Read-only property for number of rows"""
        return self._rows
class Matrix16x8(Matrix8x8):
    """The 16x8 matrix wing."""

    _columns = 16

    def pixel(self, x, y, color=None):
        """Get or set the color of a given pixel.

        Out-of-range coordinates are ignored (returns ``None``).
        """
        if not 0 <= x <= 15:
            return None
        if not 0 <= y <= 7:
            return None
        # The right half of the wing is driven as rows 8-15 of the chip,
        # and x/y are swapped relative to the driver's addressing.
        if x >= 8:
            x -= 8
            y += 8
        return super()._pixel(y, x, color)
class MatrixBackpack16x8(Matrix16x8):
    """A double matrix backpack."""

    def pixel(self, x, y, color=None):
        """Get or set the color of a given pixel.

        Unlike the wing variant, the backpack addresses the driver with
        x/y unswapped.
        """
        if not 0 <= x <= 15:
            return None
        if not 0 <= y <= 7:
            return None
        return super()._pixel(x, y, color)
class Matrix8x8x2(Matrix8x8):
    """A bi-color 8x8 matrix."""

    # Color constants: bit 0 selects the green plane, bit 1 the red plane.
    LED_OFF = 0
    LED_RED = 1
    LED_GREEN = 2
    LED_YELLOW = 3

    def pixel(self, x, y, color=None):
        """Get or set the color of a given pixel.

        Out-of-range coordinates are ignored (returns ``None``).
        """
        if not 0 <= x <= 7:
            return None
        if not 0 <= y <= 7:
            return None
        if color is not None:
            # Split the 2-bit color across the two LED planes
            # (rows 0-7 and rows 8-15 of the driver).
            super()._pixel(y, x, (color >> 1) & 0x01)
            super()._pixel(y + 8, x, (color & 0x01))
        else:
            return super()._pixel(y, x) | super()._pixel(y + 8, x) << 1
        return None

    def fill(self, color):
        """Fill the whole display with the given color."""
        fill1 = 0xff if color & 0x01 else 0x00
        fill2 = 0xff if color & 0x02 else 0x00
        for i in range(8):
            self._set_buffer(i * 2, fill1)
            self._set_buffer(i * 2 + 1, fill2)
        if self._auto_write:
            self.show()

    def image(self, img):
        """Set buffer to value of Python Imaging Library image. The image should
        be a size equal to the display size."""
        imwidth, imheight = img.size
        if imwidth != self.columns or imheight != self.rows:
            raise ValueError('Image must be same dimensions as display ({0}x{1}).'
                             .format(self.columns, self.rows))
        # Grab all the pixels from the image, faster than getpixel.
        pixels = img.convert('RGB').load()
        # Iterate through the pixels
        for x in range(self.columns):   # yes this double loop is slow,
            for y in range(self.rows):  # but these displays are small!
                if pixels[(x, y)] == (255, 0, 0):
                    self.pixel(x, y, self.LED_RED)
                elif pixels[(x, y)] == (0, 255, 0):
                    self.pixel(x, y, self.LED_GREEN)
                elif pixels[(x, y)] == (255, 255, 0):
                    self.pixel(x, y, self.LED_YELLOW)
                else:
                    # Unknown color, default to LED off.
                    self.pixel(x, y, self.LED_OFF)
        if self._auto_write:
            self.show()
| 36.017094 | 93 | 0.556834 |
30fbd96bb24a584662e869b262541ae4afc32be4 | 7,036 | py | Python | game/user.py | matthewjwhite/crystal-skull | d2702275bb96ab2b18d7f99e89837854a387532b | [
"MIT"
] | null | null | null | game/user.py | matthewjwhite/crystal-skull | d2702275bb96ab2b18d7f99e89837854a387532b | [
"MIT"
] | 5 | 2020-12-21T16:28:00.000Z | 2020-12-23T09:54:30.000Z | game/user.py | matthewjwhite/crystal-skull | d2702275bb96ab2b18d7f99e89837854a387532b | [
"MIT"
] | null | null | null | ''' Code related to user accounts '''
import base64
import copy
import random
import uuid
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5
import pymongo
from game.config import Config
from game.entity import Entity
from game.map import Location, Map
from game.constants import \
DB_NAME, DB_STR, DB_KEY, DB_CLS, DB_HP, DB_LOC, \
NAME, STR, KEY, CLS, HP, LOC
# Game configuration, loaded once at import time.
CONFIG = Config.load()
# Handle to the 'user' collection of the 'game' database on host 'mongo'.
DB = pymongo.MongoClient('mongo', 27017).game.user
# Avoid accidental multiple occurrences, ie. if multiple creation
# attempts for same user in close occurrence. Overall, enforces
# unique 'name' field value, allowing us to use this as our unique
# identifier for the user.
DB.create_index([('name', pymongo.TEXT)], unique=True)
class BadAuthentication(Exception):
    ''' Raised when a client fails the public-key login challenge '''
class BadMove(Exception):
    ''' Raised when a navigation move cannot be performed '''
class User(Entity):
    ''' Represents a single user, and helpers for users '''

    # Starting location for all users.
    start_loc = Location(
        mp=Map.get('West Dungeon'), x=50, y=50)

    @staticmethod
    def from_db(**kwargs):
        ''' Converts database document to User object '''
        data = {
            NAME: kwargs[DB_NAME],
            CLS: kwargs[DB_CLS],
            KEY: kwargs[DB_KEY],
            HP: kwargs[DB_HP],
            STR: kwargs[DB_STR],
            LOC: Location.from_db(**kwargs[DB_LOC])
        }
        return User(**data)

    @classmethod
    def get_user(cls, socket):
        ''' Communicates with client to verify user

        Unknown names fall through to account creation; known names must
        decrypt a random challenge with their private key.
        Raises BadAuthentication on a failed challenge.
        '''
        name = socket.send_wait('Welcome! User?')
        if not DB.find({DB_NAME: name}).count():
            return cls.create_user(socket, name)

        # Instantiate RSA public key object.
        key = DB.find_one({DB_NAME: name})[DB_KEY].encode('utf-8')
        key = RSA.importKey(key)

        # Encrypt random challenge for user to decrypt w/ priv. key.
        # NOTE(review): PKCS#1 v1.5 encryption is legacy; OAEP is the
        # modern recommendation -- changing it would break existing clients.
        cipher = PKCS1_v1_5.new(key)
        challenge = str(uuid.uuid4())
        challenge_bytes = challenge.encode('utf-8')
        challenge_bytes = cipher.encrypt(challenge_bytes)
        enc = base64.b64encode(challenge_bytes).decode()

        # Get attempt, and compare.
        attempt = socket.send_wait('Decrypted {}?'.format(enc))
        if challenge == attempt:
            # DB field names match User constructor parameters.
            return User.from_db(**DB.find_one({DB_NAME: name}))

        socket.send('Failed to complete challenge!')
        raise BadAuthentication('User did not fulfill challenge')

    @classmethod
    def create_user(cls, socket, name):
        ''' Communicates with client to create user

        Raises RuntimeError when the client declines creation or picks
        an unknown class.
        '''
        msg = 'Does not exist! Create (Y/N)?'
        resp = socket.send_wait(msg)
        if resp.upper() != 'Y':
            raise RuntimeError('User does not want to create user')

        # Get public key, to be used for further auth.
        key = socket.send_wait('Paste your (base64-encoded) '
                               'public key.')
        key = base64.b64decode(key).decode('utf-8')

        # Confirm selected class.
        classes = CONFIG.get('player/class')
        msg = 'Available classes: {}'.format(classes)
        player_cls = socket.send_wait(msg)
        if player_cls not in classes:
            socket.send('Invalid class!')
            raise RuntimeError('Invalid class selected')

        user = User(name=name, cls=player_cls, key=key,
                    location=cls.start_loc)
        user.save()
        return user

    def __init__(self, cls, key, location, **kwargs):
        ''' Param names match constants.py '''
        self.cls = cls
        self.key = key
        self.location = location
        super().__init__(**kwargs)

    def to_db(self):
        ''' Translates object to DB document '''
        return {
            DB_NAME: self.name,
            DB_CLS: self.cls,
            DB_KEY: self.key,
            DB_HP: self.health,
            DB_STR: self.strength,
            DB_LOC: Location.to_db(self.location)
        }

    def save(self):
        ''' Saves current state of user to DB '''
        # Upsert keyed on the unique 'name' index created at module load.
        DB.update({DB_NAME: self.name}, {'$set': self.to_db()},
                  upsert=True)

    def battle(self, socket):
        ''' Carries out a battle against a monster

        Returns remaining user HP
        '''
        # Deep-copy so damage to the monster does not mutate the map's
        # shared monster templates.
        monster = copy.deepcopy(random.choice(self.location.map.monsters))
        while True:
            # Monster strikes first each round.
            monster_dmg = monster.hit()
            self.lose(monster_dmg)
            self.save()
            socket.send_nl('{} inflicted {}, you have {} remaining'
                           .format(monster.name, monster_dmg, self.health))
            if self.health <= 0:
                break
            if socket.send_wait('Flee?').lower() == 'y':
                break

            dmg = self.hit()
            monster.lose(dmg)
            socket.send_nl('You inflicted {}, {} has {} remaining'
                           .format(dmg, monster.name, monster.health))
            if monster.health <= 0:
                break
            if socket.send_wait('Flee?').lower() == 'y':
                break
        return self.health

    def move(self, direction):
        ''' Moves user within current map or to another map

        Raises BadMove when stepping off an edge with no adjacent map.
        '''
        curr_map = self.location.map
        next_x = self.location.x
        next_y = self.location.y

        direction = direction.lower()
        if direction == 'n':
            next_y += 1
        elif direction == 'e':
            next_x += 1
        elif direction == 's':
            next_y -= 1
        elif direction == 'w':
            next_x -= 1

        # Self location coordinates are relative with respect to the current
        # map, so they must be checked per the height/width as opposed to the
        # map's X and Y min/max, which indicate points in the overall grid.
        curr_map_width = curr_map.dim.x_max - curr_map.dim.x_min + 1
        curr_map_height = curr_map.dim.y_max - curr_map.dim.y_min + 1

        # -1 indicates going off the edge, left or downwards. If we hit the
        # width or height, we've gone off the edge right or upwards -
        # coordinates start at 0!
        if next_x in (-1, curr_map_width) or next_y in (-1, curr_map_height):
            # Next step will breach boundary. See if there is even anything
            # on the other side.
            match_map = Map.match(curr_map.dim.x_min + next_x,
                                  curr_map.dim.y_min + next_y)
            if match_map:
                new_rel_x = curr_map.dim.x_min + next_x - match_map.dim.x_min
                new_rel_y = curr_map.dim.y_min + next_y - match_map.dim.y_min
                self.location.map = match_map
                self.location.x = new_rel_x
                self.location.y = new_rel_y
            else:
                raise BadMove('At boundary with no neighboring map')
        else:
            # Still within boundary, so continue on within current map.
            self.location.x = next_x
            self.location.y = next_y
| 32.878505 | 91 | 0.584991 |
b00e728c3509309fc68e8b31b4ebcc06f139da4c | 12,705 | py | Python | fhirclient/models/capabilitystatement_tests.py | zzhou41/client-py | cbfa8d7c7f1bad233b237b7c5582fc0577b21f70 | [
"Apache-2.0"
] | null | null | null | fhirclient/models/capabilitystatement_tests.py | zzhou41/client-py | cbfa8d7c7f1bad233b237b7c5582fc0577b21f70 | [
"Apache-2.0"
] | null | null | null | fhirclient/models/capabilitystatement_tests.py | zzhou41/client-py | cbfa8d7c7f1bad233b237b7c5582fc0577b21f70 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b on 2019-07-29.
# 2019, SMART Health IT.
import os
import io
import unittest
import json
from . import capabilitystatement
from .fhirdate import FHIRDate
class CapabilityStatementTests(unittest.TestCase):
    """Round-trip tests for CapabilityStatement JSON parsing.

    NOTE: this file is generated from the FHIR spec -- prefer regenerating
    over hand-editing individual assertions.
    """

    def instantiate_from(self, filename):
        """Load *filename* from FHIR_UNITTEST_DATADIR and parse it."""
        datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
        with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
            js = json.load(handle)
            self.assertEqual("CapabilityStatement", js["resourceType"])
        return capabilitystatement.CapabilityStatement(js)

    def testCapabilityStatement1(self):
        inst = self.instantiate_from("capabilitystatement-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a CapabilityStatement instance")
        self.implCapabilityStatement1(inst)

        # Serialize and re-parse to verify a lossless round trip.
        js = inst.as_json()
        self.assertEqual("CapabilityStatement", js["resourceType"])
        inst2 = capabilitystatement.CapabilityStatement(js)
        self.implCapabilityStatement1(inst2)

    def implCapabilityStatement1(self, inst):
        self.assertEqual(inst.contact[0].name, "System Administrator")
        self.assertEqual(inst.contact[0].telecom[0].system, "email")
        self.assertEqual(inst.contact[0].telecom[0].value, "wile@acme.org")
        self.assertEqual(inst.copyright, "Copyright © Acme Healthcare and GoodCorp EHR Systems")
        self.assertEqual(inst.date.date, FHIRDate("2012-01-04").date)
        self.assertEqual(inst.date.as_json(), "2012-01-04")
        self.assertEqual(inst.description, "This is the FHIR capability statement for the main EHR at ACME for the private interface - it does not describe the public interface")
        self.assertEqual(inst.document[0].documentation, "Basic rules for all documents in the EHR system")
        self.assertEqual(inst.document[0].mode, "consumer")
        self.assertEqual(inst.document[0].profile, "http://fhir.hl7.org/base/Profilebc054d23-75e1-4dc6-aca5-838b6b1ac81d/_history/b5fdd9fc-b021-4ea1-911a-721a60663796")
        self.assertTrue(inst.experimental)
        self.assertEqual(inst.fhirVersion, "4.0.0")
        self.assertEqual(inst.format[0], "xml")
        self.assertEqual(inst.format[1], "json")
        self.assertEqual(inst.id, "example")
        self.assertEqual(inst.implementation.description, "main EHR at ACME")
        self.assertEqual(inst.implementation.url, "http://10.2.3.4/fhir")
        self.assertEqual(inst.implementationGuide[0], "http://hl7.org/fhir/us/lab")
        self.assertEqual(inst.instantiates[0], "http://ihe.org/fhir/CapabilityStatement/pixm-client")
        self.assertEqual(inst.jurisdiction[0].coding[0].code, "US")
        self.assertEqual(inst.jurisdiction[0].coding[0].display, "United States of America (the)")
        self.assertEqual(inst.jurisdiction[0].coding[0].system, "urn:iso:std:iso:3166")
        self.assertEqual(inst.kind, "instance")
        self.assertEqual(inst.messaging[0].documentation, "ADT A08 equivalent for external system notifications")
        self.assertEqual(inst.messaging[0].endpoint[0].address, "mllp:10.1.1.10:9234")
        self.assertEqual(inst.messaging[0].endpoint[0].protocol.code, "mllp")
        self.assertEqual(inst.messaging[0].endpoint[0].protocol.system, "http://terminology.hl7.org/CodeSystem/message-transport")
        self.assertEqual(inst.messaging[0].reliableCache, 30)
        self.assertEqual(inst.messaging[0].supportedMessage[0].definition, "MessageDefinition/example")
        self.assertEqual(inst.messaging[0].supportedMessage[0].mode, "receiver")
        self.assertEqual(inst.name, "ACME-EHR")
        self.assertEqual(inst.patchFormat[0], "application/xml-patch+xml")
        self.assertEqual(inst.patchFormat[1], "application/json-patch+json")
        self.assertEqual(inst.publisher, "ACME Corporation")
        self.assertEqual(inst.purpose, "Main EHR capability statement, published for contracting and operational support")
        self.assertEqual(inst.rest[0].compartment[0], "http://hl7.org/fhir/CompartmentDefinition/patient")
        self.assertEqual(inst.rest[0].documentation, "Main FHIR endpoint for acem health")
        self.assertEqual(inst.rest[0].interaction[0].code, "transaction")
        self.assertEqual(inst.rest[0].interaction[1].code, "history-system")
        self.assertEqual(inst.rest[0].mode, "server")
        self.assertTrue(inst.rest[0].resource[0].conditionalCreate)
        self.assertEqual(inst.rest[0].resource[0].conditionalDelete, "not-supported")
        self.assertEqual(inst.rest[0].resource[0].conditionalRead, "full-support")
        self.assertFalse(inst.rest[0].resource[0].conditionalUpdate)
        self.assertEqual(inst.rest[0].resource[0].documentation, "This server does not let the clients create identities.")
        self.assertEqual(inst.rest[0].resource[0].interaction[0].code, "read")
        self.assertEqual(inst.rest[0].resource[0].interaction[1].code, "vread")
        self.assertEqual(inst.rest[0].resource[0].interaction[1].documentation, "Only supported for patient records since 12-Dec 2012")
        self.assertEqual(inst.rest[0].resource[0].interaction[2].code, "update")
        self.assertEqual(inst.rest[0].resource[0].interaction[3].code, "history-instance")
        self.assertEqual(inst.rest[0].resource[0].interaction[4].code, "create")
        self.assertEqual(inst.rest[0].resource[0].interaction[5].code, "history-type")
        self.assertEqual(inst.rest[0].resource[0].profile, "http://registry.fhir.org/r4/StructureDefinition/7896271d-57f6-4231-89dc-dcc91eab2416")
        self.assertTrue(inst.rest[0].resource[0].readHistory)
        self.assertEqual(inst.rest[0].resource[0].searchInclude[0], "Organization")
        self.assertEqual(inst.rest[0].resource[0].searchParam[0].definition, "http://hl7.org/fhir/SearchParameter/Patient-identifier")
        self.assertEqual(inst.rest[0].resource[0].searchParam[0].documentation, "Only supports search by institution MRN")
        self.assertEqual(inst.rest[0].resource[0].searchParam[0].name, "identifier")
        self.assertEqual(inst.rest[0].resource[0].searchParam[0].type, "token")
        self.assertEqual(inst.rest[0].resource[0].searchParam[1].definition, "http://hl7.org/fhir/SearchParameter/Patient-general-practitioner")
        self.assertEqual(inst.rest[0].resource[0].searchParam[1].name, "general-practitioner")
        self.assertEqual(inst.rest[0].resource[0].searchParam[1].type, "reference")
        self.assertEqual(inst.rest[0].resource[0].searchRevInclude[0], "Person")
        self.assertEqual(inst.rest[0].resource[0].supportedProfile[0], "http://registry.fhir.org/r4/StructureDefinition/00ab9e7a-06c7-4f77-9234-4154ca1e3347")
        self.assertEqual(inst.rest[0].resource[0].type, "Patient")
        self.assertFalse(inst.rest[0].resource[0].updateCreate)
        self.assertEqual(inst.rest[0].resource[0].versioning, "versioned-update")
        self.assertTrue(inst.rest[0].security.cors)
        self.assertEqual(inst.rest[0].security.description, "See Smart on FHIR documentation")
        self.assertEqual(inst.rest[0].security.service[0].coding[0].code, "SMART-on-FHIR")
        self.assertEqual(inst.rest[0].security.service[0].coding[0].system, "http://terminology.hl7.org/CodeSystem/restful-security-service")
        self.assertEqual(inst.software.name, "EHR")
        self.assertEqual(inst.software.releaseDate.date, FHIRDate("2012-01-04").date)
        self.assertEqual(inst.software.releaseDate.as_json(), "2012-01-04")
        self.assertEqual(inst.software.version, "0.00.020.2134")
        self.assertEqual(inst.status, "draft")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.title, "ACME EHR capability statement")
        self.assertEqual(inst.url, "urn:uuid:68D043B5-9ECF-4559-A57A-396E0D452311")
        self.assertEqual(inst.useContext[0].code.code, "focus")
        self.assertEqual(inst.useContext[0].code.system, "http://terminology.hl7.org/CodeSystem/usage-context-type")
        self.assertEqual(inst.useContext[0].valueCodeableConcept.coding[0].code, "positive")
        self.assertEqual(inst.useContext[0].valueCodeableConcept.coding[0].system, "http://terminology.hl7.org/CodeSystem/variant-state")
        self.assertEqual(inst.version, "20130510")

    def testCapabilityStatement2(self):
        inst = self.instantiate_from("capabilitystatement-phr-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a CapabilityStatement instance")
        self.implCapabilityStatement2(inst)

        # Serialize and re-parse to verify a lossless round trip.
        js = inst.as_json()
        self.assertEqual("CapabilityStatement", js["resourceType"])
        inst2 = capabilitystatement.CapabilityStatement(js)
        self.implCapabilityStatement2(inst2)

    def implCapabilityStatement2(self, inst):
        self.assertEqual(inst.contact[0].telecom[0].system, "url")
        self.assertEqual(inst.contact[0].telecom[0].value, "http://hl7.org/fhir")
        self.assertEqual(inst.date.date, FHIRDate("2013-06-18").date)
        self.assertEqual(inst.date.as_json(), "2013-06-18")
        self.assertEqual(inst.description, "Prototype Capability Statement for September 2013 Connectathon")
        self.assertEqual(inst.fhirVersion, "4.0.0")
        self.assertEqual(inst.format[0], "json")
        self.assertEqual(inst.format[1], "xml")
        self.assertEqual(inst.id, "phr")
        self.assertEqual(inst.kind, "capability")
        self.assertEqual(inst.meta.tag[0].code, "HTEST")
        self.assertEqual(inst.meta.tag[0].display, "test health data")
        self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
        self.assertEqual(inst.name, "PHR Template")
        self.assertEqual(inst.publisher, "FHIR Project")
        self.assertEqual(inst.rest[0].documentation, "Protoype server Capability Statement for September 2013 Connectathon")
        self.assertEqual(inst.rest[0].mode, "server")
        self.assertEqual(inst.rest[0].resource[0].interaction[0].code, "read")
        self.assertEqual(inst.rest[0].resource[0].interaction[1].code, "search-type")
        self.assertEqual(inst.rest[0].resource[0].interaction[1].documentation, "When a client searches patients with no search criteria, they get a list of all patients they have access too. Servers may elect to offer additional search parameters, but this is not required")
        self.assertEqual(inst.rest[0].resource[0].type, "Patient")
        self.assertEqual(inst.rest[0].resource[1].interaction[0].code, "read")
        self.assertEqual(inst.rest[0].resource[1].interaction[1].code, "search-type")
        self.assertEqual(inst.rest[0].resource[1].searchParam[0].documentation, "_id parameter always supported. For the connectathon, servers may elect which search parameters are supported")
        self.assertEqual(inst.rest[0].resource[1].searchParam[0].name, "_id")
        self.assertEqual(inst.rest[0].resource[1].searchParam[0].type, "token")
        self.assertEqual(inst.rest[0].resource[1].type, "DocumentReference")
        self.assertEqual(inst.rest[0].resource[2].interaction[0].code, "read")
        self.assertEqual(inst.rest[0].resource[2].interaction[1].code, "search-type")
        self.assertEqual(inst.rest[0].resource[2].searchParam[0].documentation, "Standard _id parameter")
        self.assertEqual(inst.rest[0].resource[2].searchParam[0].name, "_id")
        self.assertEqual(inst.rest[0].resource[2].searchParam[0].type, "token")
        self.assertEqual(inst.rest[0].resource[2].type, "Condition")
        self.assertEqual(inst.rest[0].resource[3].interaction[0].code, "read")
        self.assertEqual(inst.rest[0].resource[3].interaction[1].code, "search-type")
        self.assertEqual(inst.rest[0].resource[3].searchParam[0].documentation, "Standard _id parameter")
        self.assertEqual(inst.rest[0].resource[3].searchParam[0].name, "_id")
        self.assertEqual(inst.rest[0].resource[3].searchParam[0].type, "token")
        self.assertEqual(inst.rest[0].resource[3].searchParam[1].documentation, "which diagnostic discipline/department created the report")
        self.assertEqual(inst.rest[0].resource[3].searchParam[1].name, "service")
        self.assertEqual(inst.rest[0].resource[3].searchParam[1].type, "token")
        self.assertEqual(inst.rest[0].resource[3].type, "DiagnosticReport")
        self.assertEqual(inst.rest[0].security.service[0].text, "OAuth")
        self.assertEqual(inst.software.name, "ACME PHR Server")
        self.assertEqual(inst.status, "draft")
        self.assertEqual(inst.text.status, "generated")
| 71.376404 | 275 | 0.707281 |
f380f8ada801beac3f8215b3c34d8a435455a68f | 268 | py | Python | app/user/urls.py | Ivanrputra/recipe-app-api | e70548a4f7f8756f8f4224dcf39b809a8b546da1 | [
"MIT"
] | null | null | null | app/user/urls.py | Ivanrputra/recipe-app-api | e70548a4f7f8756f8f4224dcf39b809a8b546da1 | [
"MIT"
] | null | null | null | app/user/urls.py | Ivanrputra/recipe-app-api | e70548a4f7f8756f8f4224dcf39b809a8b546da1 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
# Namespace for reversing these routes, e.g. reverse('user:create').
app_name = 'user'

urlpatterns = [
    path('create/', views.CreateUserView.as_view(), name='create'),
    path('token/', views.CreateTokenView.as_view(), name='token'),
    path('me/', views.ManageUserView.as_view(), name='me'),
]
| 22.333333 | 62 | 0.712687 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.