text stringlengths 0 1.05M | meta dict |
|---|---|
__author__ = 'adeb'
from spynet.models.layer_block import *
import spynet.models.neuron_type as neuron_type
from spynet.models.layer import *
from spynet.models.network import Network
class NetworkMNIST(Network):
    """
    2D convnet for MNIST dataset (LeNet-style: two conv/max-pool blocks,
    one fully connected ReLU layer, softmax output).
    """
    def __init__(self):
        Network.__init__(self)
        # Input patch dimensions; set by init() and persisted with the model.
        self.in_width = None
        self.in_height = None

    def init(self, patch_height, patch_width, n_out):
        """
        Build the layer blocks for a patch_height x patch_width input and
        n_out output classes.

        Args:
            patch_height (int): height of the input image patch.
            patch_width (int): width of the input image patch.
            n_out (int): number of output classes.
        """
        Network.init_common(self, patch_height*patch_width, n_out)

        self.in_height = patch_height
        self.in_width = patch_width

        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0: 20 feature maps, 5x5 convolution, 2x2 max pooling
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_height, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
                                      poolsize=(pool_size_height0, pool_size_width0))

        # Layer 1: 50 feature maps, 5x5 convolution, 2x2 max pooling.
        # NOTE: '/' is integer division under Python 2 (ints in, int out);
        # use '//' if this file is ever ported to Python 3.
        filter_map_height1 = (patch_height - kernel_height0 + 1) / pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) / pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block1 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))

        # Layer 2: fully connected hidden layer on the flattened feature maps
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) / pool_size_height1
        filter_map_with2 = (filter_map_width1 - kernel_width1 + 1) / pool_size_width1
        n_in2 = n_kern1 * filter_map_height2 * filter_map_with2
        n_out2 = 500
        block2 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)

        # Layer 3: softmax classification layer
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out2, n_out=self.n_out)

        self.ls_layers = convert_blocks_into_feed_forward_layers([block0, block1, block2, block3])
        # Collect every trainable parameter of the stacked layers.
        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params

    def save_parameters_virtual(self, h5file):
        # Persist input dimensions so load_parameters_virtual can rebuild
        # the architecture before loading the weights.
        h5file.attrs['in_height'] = self.in_height
        h5file.attrs['in_width'] = self.in_width
def load_parameters_virtual(self, h5file):
self.in_height = int(h5file.attrs["in_height"])
self.in_width = int(h5file.attrs["in_width"])
self.init(self.in_height, self.in_width, self.n_out) | {
"repo_name": "adbrebs/spynet",
"path": "mnist_example/network_mnist.py",
"copies": "1",
"size": "2786",
"license": "bsd-2-clause",
"hash": 2977853251196921300,
"line_mean": 37.1780821918,
"line_max": 101,
"alpha_frac": 0.5814788227,
"autogenerated": false,
"ratio": 3.3688029020556227,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9437927861667321,
"avg_score": 0.00247077261766015,
"num_lines": 73
} |
__author__ = 'adeb'
from theano import tensor as T
class NeuronType():
    """
    Base class for neuron types.

    A neuron type encapsulates the activation function applied to the
    pre-activation of a layer. Subclasses set ``name`` and implement
    ``activation_function``.
    """

    # Human-readable identifier of the activation, shown by __str__.
    name = None

    def __init__(self):
        pass

    def activation_function(self, x):
        """Apply the activation to the (symbolic) input ``x``."""
        raise NotImplementedError

    def __str__(self):
        return "Neuron type: {}".format(self.name)
class NeuronLinear(NeuronType):
    """Identity activation: the output equals the pre-activation."""

    name = "Linear"

    def __init__(self):
        NeuronType.__init__(self)

    def activation_function(self, x):
        # f(x) = x
        return x
class NeuronTanh(NeuronType):
    """Hyperbolic tangent activation, squashing to (-1, 1)."""

    name = "Tanh"

    def __init__(self):
        NeuronType.__init__(self)

    def activation_function(self, x):
        # f(x) = tanh(x)
        return T.tanh(x)
class NeuronSigmoid(NeuronType):
    """Logistic sigmoid activation, squashing to (0, 1)."""

    name = "Sigmoid"

    def __init__(self):
        NeuronType.__init__(self)

    def activation_function(self, x):
        # f(x) = 1 / (1 + exp(-x))
        return T.nnet.sigmoid(x)
class NeuronSoftmax(NeuronType):
    """Softmax activation: rows are normalized to probability distributions."""

    name = "SoftMax"

    def __init__(self):
        NeuronType.__init__(self)

    def activation_function(self, x):
        return T.nnet.softmax(x)
class NeuronRELU(NeuronType):
    """
    Rectified linear unit: f(x) = max(x, 0), expressed with a switch so the
    symbolic graph stays elementwise.
    """

    name = "RELU"

    def __init__(self):
        NeuronType.__init__(self)

    def activation_function(self, x):
        return T.switch(x > 0., x, 0)
| {
"repo_name": "adbrebs/spynet",
"path": "models/neuron_type.py",
"copies": "1",
"size": "1337",
"license": "bsd-2-clause",
"hash": -4655806004862658000,
"line_mean": 17.5694444444,
"line_max": 100,
"alpha_frac": 0.5923709798,
"autogenerated": false,
"ratio": 3.5653333333333332,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9657016744376457,
"avg_score": 0.0001375137513751375,
"num_lines": 72
} |
__author__ = 'adeb'
# Hack to be able to run this module
import sys
import os
sys.path.insert(0, os.path.abspath('../..'))
from shutil import copy2
import inspect
from spynet.data.dataset import Dataset
from spynet.models.network import *
from spynet.models.neuron_type import *
from spynet.training.trainer import *
from spynet.training.monitor import *
from spynet.training.parameters_selector import *
from spynet.training.stopping_criterion import *
from spynet.training.cost_function import *
from spynet.training.learning_update import *
from spynet.experiment import Experiment
from transform_mnist_to_h5 import transform_mnist_to_h5
class ExperimentMNIST(Experiment):
    """
    MNIST autoencoder experiment: trains an AutoEncoder to reconstruct its
    input digits and saves the resulting network.
    """
    def __init__(self, exp_name, data_path):
        Experiment.__init__(self, exp_name, data_path)

    def copy_file_virtual(self):
        # Archive this script alongside the experiment results for
        # reproducibility.
        copy2(inspect.getfile(inspect.currentframe()), self.path)

    def run(self):
        """Create the datasets, build the autoencoder, train it and save it."""
        ###### Create the datasets
        training_data_path = self.data_path + "train.h5"
        testing_data_path = self.data_path + "test.h5"

        # If files don't already exist, create them
        if not os.path.isfile(training_data_path):
            transform_mnist_to_h5()

        prop_validation = 0.15  # Percentage of the training dataset that is used for validation (early stopping)
        ds_training = Dataset.create_and_read(training_data_path)
        ds_validation, ds_training = ds_training.split_dataset_proportions([prop_validation, 1-prop_validation])
        ds_testing = Dataset.create_and_read(testing_data_path)

        # Autoencoder setup: the targets are the inputs themselves.
        ds_training.outputs = ds_training.inputs
        ds_validation.outputs = ds_validation.inputs
        ds_testing.outputs = ds_testing.inputs

        # Scale the data (disabled here; raw pixel values are used)
        # s = Scaler([slice(None, None)])
        # s.compute_parameters(ds_training.inputs)
        # s.scale(ds_training.inputs)
        # s.scale(ds_validation.inputs)
        # s.scale(ds_testing.inputs)

        ###### Create the network
        # net = NetworkMNIST()
        net = AutoEncoder()
        # 784 -> 256 -> 784 with dropout (p=0.5) and sigmoid units.
        net.init([28**2, 256, 28**2], dropout=True, dropout_p=[0.5], neuron_function=NeuronSigmoid())
        print net

        ###### Configure the trainer

        # Cost function: mean square reconstruction error
        cost_function = CostMSE()

        # Learning update: gradient descent with momentum
        learning_rate = 0.01
        momentum = 0.5
        lr_update = LearningUpdateGDMomentum(learning_rate, momentum)

        # Create monitors and add them to the trainer
        err_training = MonitorMSE(1, "Training", ds_training)
        err_testing = MonitorMSE(1, "Testing", ds_testing)
        err_validation = MonitorMSE(1, "Validation", ds_validation)

        # Create stopping criteria and add them to the trainer
        max_epoch = MaxEpoch(50)
        early_stopping = EarlyStopping(err_validation)

        # Create the network selector (keeps the params with best validation)
        params_selector = ParamSelectorBestMonitoredValue(err_validation)

        # Create the trainer object
        batch_size = 20
        t = Trainer(net, cost_function, params_selector, [max_epoch, early_stopping],
                    lr_update, ds_training, batch_size,
                    [err_training, err_testing, err_validation])

        ###### Train the network
        t.train()

        ###### Plot the records
        save_records_plot(self.path, [err_training, err_testing, err_validation], "errors", t.n_train_batches)

        ###### Save the network
        net.save_parameters(self.path + "netdrop.net")
if __name__ == '__main__':
exp_name = "mnist_example"
data_path = "./datasets/mnist/"
exp = ExperimentMNIST(exp_name, data_path)
exp.run() | {
"repo_name": "adbrebs/spynet",
"path": "mnist_example/experiment_mnist_autoencoder.py",
"copies": "1",
"size": "3612",
"license": "bsd-2-clause",
"hash": -3795537665780356600,
"line_mean": 32.1467889908,
"line_max": 113,
"alpha_frac": 0.6478405316,
"autogenerated": false,
"ratio": 3.712230215827338,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4860070747427338,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeb'
# Hack to be able to run this module
import sys
import os
sys.path.insert(0, os.path.abspath('../..'))
from shutil import copy2
import inspect
from spynet.utils.utilities import analyse_classes
from spynet.data.dataset import Dataset, Scaler
from spynet.mnist_example.network_mnist import NetworkMNIST
from spynet.models.network import *
from spynet.training.trainer import *
from spynet.training.monitor import *
from spynet.training.parameters_selector import *
from spynet.training.stopping_criterion import *
from spynet.training.cost_function import *
from spynet.training.learning_update import *
from spynet.experiment import Experiment
from transform_mnist_to_h5 import transform_mnist_to_h5
class ExperimentMNIST(Experiment):
    """
    MNIST classification experiment: trains NetworkMNIST (a 2D convnet) on
    the 10-digit classification task and saves the resulting network.
    """
    def __init__(self, exp_name, data_path):
        Experiment.__init__(self, exp_name, data_path)

    def copy_file_virtual(self):
        # Archive this script alongside the experiment results for
        # reproducibility.
        copy2(inspect.getfile(inspect.currentframe()), self.path)

    def run(self):
        """Create the datasets, build the convnet, train it and save it."""
        ###### Create the datasets
        training_data_path = self.data_path + "train.h5"
        testing_data_path = self.data_path + "test.h5"

        # If files don't already exist, create them
        if not os.path.isfile(training_data_path):
            transform_mnist_to_h5()

        prop_validation = 0.3  # Percentage of the training dataset that is used for validation (early stopping)
        ds_training = Dataset.create_and_read(training_data_path)
        ds_validation, ds_training = ds_training.split_dataset_proportions([prop_validation, 1-prop_validation])
        ds_testing = Dataset.create_and_read(testing_data_path)

        # Few stats about the targets (class balance of the training set)
        analyse_classes(np.argmax(ds_training.outputs, axis=1), "Training data")

        # Scale the data; scaling parameters are fit on the training set only
        # and then applied to all three splits.
        s = Scaler([slice(None, None)])
        s.compute_parameters(ds_training.inputs)
        s.scale(ds_training.inputs)
        s.scale(ds_validation.inputs)
        s.scale(ds_testing.inputs)

        ###### Create the network
        net = NetworkMNIST()
        # 28x28 input patches, 10 output classes.
        net.init(28, 28, 10)
        print net

        ###### Configure the trainer

        # Cost function: negative log-likelihood
        cost_function = CostNegLL()

        # Learning update: gradient descent with momentum
        learning_rate = 0.13
        momentum = 0.5
        lr_update = LearningUpdateGDMomentum(learning_rate, momentum)

        # Create monitors and add them to the trainer
        err_training = MonitorErrorRate(1, "Training", ds_training)
        err_testing = MonitorErrorRate(1, "Testing", ds_testing)
        err_validation = MonitorErrorRate(1, "Validation", ds_validation)

        # Create stopping criteria and add them to the trainer
        max_epoch = MaxEpoch(300)
        early_stopping = EarlyStopping(err_validation)

        # Create the network selector (keeps the params with best validation)
        params_selector = ParamSelectorBestMonitoredValue(err_validation)

        # Create the trainer object
        batch_size = 200
        t = Trainer(net, cost_function, params_selector, [max_epoch, early_stopping],
                    lr_update, ds_training, batch_size,
                    [err_training, err_testing, err_validation])

        ###### Train the network
        t.train()

        ###### Plot the records
        save_records_plot(self.path, [err_training, err_testing, err_validation], "errors", t.n_train_batches)

        ###### Save the network
        net.save_parameters(self.path + "net.net")
if __name__ == '__main__':
exp_name = "mnist_example"
data_path = "./datasets/mnist/"
exp = ExperimentMNIST(exp_name, data_path)
exp.run() | {
"repo_name": "adbrebs/spynet",
"path": "mnist_example/experiment_mnist.py",
"copies": "1",
"size": "3566",
"license": "bsd-2-clause",
"hash": 5055568935549278000,
"line_mean": 31.7247706422,
"line_max": 112,
"alpha_frac": 0.6561974201,
"autogenerated": false,
"ratio": 3.761603375527426,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.988675841685271,
"avg_score": 0.006208475754943147,
"num_lines": 109
} |
__author__ = 'adeb'
import math
import numpy as np
import theano.tensor as T
from spynet.utils.utilities import share
class CostFunction():
    """
    Abstract cost function minimized during the training.
    """

    def __init__(self):
        pass

    def compute_cost_symb(self, pred_batch, tg_batch):
        """
        Build the symbolic (Theano) expression of the cost.

        Args:
            pred_batch (theano.tensor.TensorType): predicted output returned by the network
            tg_batch (theano.tensor.TensorType): target output

        Return:
            (theano.tensor.TensorType): a tensor representing the cost
        """
        raise NotImplementedError

    def compute_cost_numpy(self, pred_batch, tg_batch):
        """
        Compute the cost on concrete numpy arrays.
        """
        raise NotImplementedError

    @staticmethod
    def factory(**kwargs):
        """
        Instantiate a cost function from a configuration dictionary
        (kwargs["type"] is "MSE" or "NLL").
        """
        update_type = kwargs["type"]
        if update_type == "MSE":
            return CostMSE()
        if update_type == "NLL":
            return CostNegLL()
        raise Exception("No cost function with this name. Check the config file.")
class CostMSE(CostFunction):
    """
    Mean square error: squared residuals summed over output units and
    averaged over the batch.
    """

    def __init__(self):
        CostFunction.__init__(self)

    def compute_cost_symb(self, pred_batch, tg_batch):
        diff = pred_batch - tg_batch
        return T.mean(T.sum(diff * diff, axis=1))

    def compute_cost_numpy(self, pred_batch, tg_batch):
        diff = pred_batch - tg_batch
        return np.mean(np.sum(diff * diff, axis=1))
class CostNegLL(CostFunction):
    """
    Negative log-likelihood.

    Rows of pred_batch are class probabilities; rows of tg_batch are
    (typically one-hot) target distributions.
    """

    def __init__(self):
        CostFunction.__init__(self)

    def compute_cost_symb(self, pred_batch, tg_batch):
        # Log of the probability mass assigned to the target, batch-averaged.
        return -T.mean(T.log(T.sum(pred_batch * tg_batch, axis=1)))

    def compute_cost_numpy(self, pred_batch, tg_batch):
        # BUG FIX: the previous expression used math.log, which raises a
        # TypeError on numpy arrays, and disagreed with compute_cost_symb for
        # non-one-hot targets. Mirror the symbolic formula with numpy ops.
        return -np.mean(np.log(np.sum(pred_batch * tg_batch, axis=1)))
class CostNegLLWeighted(CostFunction):
    """
    Negative log-likelihood with per-sample weights derived from class
    volumes (larger weight when the predicted class volume differs from the
    target class volume).
    """
    def __init__(self, volumes):
        CostFunction.__init__(self)
        # Extreme volumes, stored as Theano shared variables for the graph.
        self.m = share(np.max(volumes))
        self.n = share(np.min(volumes))
        # Prepend a zero so class indices can address the array directly
        # (index 0 presumably corresponds to background — TODO confirm).
        volumes = np.concatenate([np.array([0]), volumes])
        self.volumes = share(volumes)
    def compute_cost_symb(self, pred_batch, tg_batch):
        """Symbolic weighted negative log-likelihood."""
        a = T.argmax(pred_batch, axis=1)
        b = T.argmax(tg_batch, axis=1)
        # Weight each sample by the volume ratio of predicted vs target
        # class, scaled by the global min/max volume ratio.
        weights = 1 + 10 * (self.volumes[a] / self.volumes[b]) * (self.n/self.m)
        return -T.mean(weights * T.log(T.sum(pred_batch * tg_batch, axis=1)))
def compute_cost_numpy(self, pred_batch, tg_batch):
return -np.mean(np.sum(math.log(pred_batch) * tg_batch, axis=1))
def test(self):
pred_batch = share(np.reshape(np.array([0, 0.2, 0.8, 0, 0.6, 0.4]), (2,3)))
tg_batch = share(np.reshape(np.array([0, 0, 1, 0, 0, 1]), (2,3)))
a = T.argmax(pred_batch, axis=1)
b = T.argmax(tg_batch, axis=1)
weights = 1 + 10 * (self.volumes[a] / self.volumes[b]) * (self.n/self.m)
return -T.mean(weights * T.log(T.sum(pred_batch * tg_batch, axis=1))) | {
"repo_name": "adbrebs/spynet",
"path": "training/cost_function.py",
"copies": "1",
"size": "3287",
"license": "bsd-2-clause",
"hash": 4222138379884774400,
"line_mean": 30.6153846154,
"line_max": 91,
"alpha_frac": 0.5938545786,
"autogenerated": false,
"ratio": 3.449108079748164,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9537201121141412,
"avg_score": 0.00115230744135042,
"num_lines": 104
} |
__author__ = 'adeb'
import numpy as np
from spynet.utils.utilities import distrib_balls_in_bins
def create_pick_voxel(config_ini):
    """
    Factory function to create the objects responsible for picking the voxels.

    Reads config_ini.pick_vx ("where" selects the region, "how" selects the
    extraction strategy) and returns a PickVoxel combining both.

    NOTE(review): on an unknown configuration value this prints an error and
    returns None; callers must handle a None result.
    """
    where_vx = config_ini.pick_vx["where"]
    how_vx = config_ini.pick_vx["how"]
    if where_vx == "anywhere":
        select_region = SelectWholeBrain()
    elif where_vx == "plane":
        # Restrict extraction to a single orthogonal plane of the volume.
        axis = config_ini.pick_vx["axis"]
        plane = config_ini.pick_vx["plane"]
        select_region = SelectPlane(axis, plane)
    else:
        print "error in pick_voxel"
        return
    # One patch per extracted voxel.
    n_patch_per_voxel = 1
    if how_vx == "all":
        extract_voxel = ExtractVoxelAll(n_patch_per_voxel)
    else:
        if how_vx == "random":
            extract_voxel = ExtractVoxelRandomly(n_patch_per_voxel)
        elif how_vx == "balanced":
            extract_voxel = ExtractVoxelBalanced(n_patch_per_voxel)
        else:
            print "error in pick_voxel"
            return
    return PickVoxel(select_region, extract_voxel)
class PickVoxel():
    """
    Manage the selection and extraction of voxels in an mri image.

    Combines a region selector (where voxels may come from) with an
    extraction strategy (which voxels of that region are taken).
    """

    def __init__(self, select_region, extract_voxel):
        self.select_region = select_region
        self.extract_voxel = extract_voxel

    def pick(self, n_vx, label, verbose=False, batch_size=10000):
        """
        Pick n_vx voxels (the whole region when n_vx is None), delegating
        the extraction to the configured strategy.
        """
        # Restrict extraction to the selected region of the label volume.
        idx_region = self.select_region.select(label)
        region = label.ravel()[idx_region]
        if n_vx is None:
            n_vx = len(idx_region)
        return self.extract_voxel.extract(n_vx, idx_region, region, label.shape, batch_size, verbose)
class SelectRegion():
    """
    Interface for selecting a spatial region of the mri image in which
    voxels will later be extracted.
    """

    def __init__(self):
        pass

    def select(self, label):
        """Return the flat indices of the selected voxels of ``label``."""
        raise NotImplementedError
class SelectWholeBrain(SelectRegion):
    """
    Select the whole labelled brain (every non-zero voxel of the label map).
    """

    def __init__(self):
        SelectRegion.__init__(self)

    def select(self, label):
        # Flat indices of all non-zero labels.
        return label.ravel().nonzero()[0]
class SelectPlane(SelectRegion):
    """
    Select a specific orthogonal plane defined by an axis (the plane is
    orthogonal to this axis) and a specific axis coordinate.
    """

    def __init__(self, axis, axis_coordinate):
        SelectRegion.__init__(self)
        self.axis = axis
        self.axis_coordinate = axis_coordinate

    def select(self, label):
        """Return flat indices of the non-zero labels lying in the plane."""
        plan = np.zeros(label.shape, dtype=float)
        slice_axis = [slice(None)] * 3
        slice_axis[self.axis] = self.axis_coordinate
        # BUG FIX: indexing with a *list* of slices is deprecated and raises
        # an error in modern numpy; a tuple performs the intended multi-axis
        # indexing on every version (identical behavior on old numpy).
        slice_axis = tuple(slice_axis)
        plan[slice_axis] = label[slice_axis]
        return plan.ravel().nonzero()[0]
class ExtractVoxel():
    """
    Extract voxels from a given region of the mri image, yielding their 3D
    coordinates batch by batch (generator).
    """

    def __init__(self, n_repeat):
        # Number of times each extracted voxel is repeated in the output.
        self.n_repeat = n_repeat

    def extract(self, n_vx, idx_region, region, shape, batch_size, verbose=False):
        """
        Generator yielding (batch*n_repeat, 3) int arrays of coordinates.

        Args:
            n_vx (int): total number of voxels to extract.
            idx_region: flat indices of the candidate voxels.
            region: labels of the candidate voxels.
            shape: shape of the label volume (to unravel flat indices).
            batch_size (int): number of voxels per yielded batch.

        NOTE(review): when batch_size divides n_vx exactly, the final yield
        produces an empty batch (last_batch_size == 0) — confirm consumers
        tolerate it.
        """
        n_batches, last_batch_size = divmod(n_vx, batch_size)

        def extract_inner(vx_id, batch_size_inner):
            # Subclass chooses the flat indices; convert them to 3D coords.
            vx_idx = self.extract_batch_virtual(vx_id, batch_size_inner, idx_region, region)
            if self.n_repeat > 1:
                vx_idx = np.repeat(vx_idx, self.n_repeat)
            return np.asarray(np.unravel_index(vx_idx, shape), dtype=int).T

        for b in xrange(n_batches):
            vx_id = b*batch_size
            if verbose:
                print " voxels [{} - {}] / {}".format(vx_id, vx_id + batch_size, n_vx)
            yield extract_inner(vx_id, batch_size)
        # Final, possibly smaller, batch.
        vx_id = n_batches*batch_size
        if verbose:
            print " voxels [{} - {}] / {}".format(vx_id, vx_id + last_batch_size, n_vx)
        yield extract_inner(vx_id, last_batch_size)

    def extract_batch_virtual(self, vx_id, batch_size, idx_region, region):
        """Return flat voxel indices for one batch (subclass responsibility)."""
        raise NotImplementedError
class ExtractVoxelRandomly(ExtractVoxel):
    """
    Uniform spatial distribution of the patches.
    """

    def __init__(self, n_repeat):
        ExtractVoxel.__init__(self, n_repeat)

    def extract_batch_virtual(self, vx_id, batch_size, idx_region, region):
        # Sample positions uniformly (with replacement) inside the region.
        picks = np.random.randint(idx_region.size, size=batch_size)
        return idx_region[picks]
class ExtractVoxelBalanced(ExtractVoxel):
    """
    Extract roughly the same number of voxels per class present in the region.
    """

    def __init__(self, n_repeat):
        ExtractVoxel.__init__(self, n_repeat)

    def extract_batch_virtual(self, vx_id, batch_size, idx_region, region):
        vx_idx = np.zeros((batch_size,), dtype=int)
        # Compute the number of voxels for each class present in the region.
        classes_present = np.unique(region)
        n_classes_present = len(classes_present)
        # Spread batch_size picks across the classes as evenly as possible.
        voxels_per_region = distrib_balls_in_bins(batch_size, n_classes_present)
        vx_counter = 0
        for id_k, k in enumerate(classes_present):
            if voxels_per_region[id_k] == 0:
                continue
            # Sample (with replacement) among the voxels of class k.
            sub_region = np.where(region == k)[0]
            r = np.random.randint(len(sub_region), size=voxels_per_region[id_k])
            vx_counter_next = vx_counter + voxels_per_region[id_k]
            vx_idx[vx_counter:vx_counter_next] = idx_region[sub_region[r]]
            vx_counter = vx_counter_next
        return vx_idx
class ExtractVoxelAll(ExtractVoxel):
    """
    Extract all the possible voxels from the mri region.
    """

    def __init__(self, n_repeat):
        ExtractVoxel.__init__(self, n_repeat)

    def extract_batch_virtual(self, vx_id, batch_size, idx_region, region):
        # Walk the region sequentially, one contiguous batch at a time.
        batch_end = vx_id + batch_size
        return idx_region[vx_id:batch_end]
class ExtractVoxelBoundaries(ExtractVoxel):
    """
    Extract voxels with probability proportional to the given weights
    (presumably emphasizing region boundaries — TODO confirm with callers).
    """
    def __init__(self, n_repeat, weights):
        ExtractVoxel.__init__(self, n_repeat)
        # Per-voxel sampling weights over the whole volume, used as a
        # cumulative distribution by extract_batch_virtual.
        self.weights = weights
def extract_batch_virtual(self, vx_id, batch_size, idx_region, region):
return np.array(np.ravel(self.weights).cumsum().searchsorted(np.random.sample(len(vx_id)))) | {
"repo_name": "adbrebs/spynet",
"path": "data/utils_3d/pick_voxel.py",
"copies": "1",
"size": "6036",
"license": "bsd-2-clause",
"hash": -2254845302011138800,
"line_mean": 31.4569892473,
"line_max": 116,
"alpha_frac": 0.6148111332,
"autogenerated": false,
"ratio": 3.4890173410404626,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9600660514541011,
"avg_score": 0.0006335919398904808,
"num_lines": 186
} |
__author__ = 'adeb'
import numpy as np
class ParamSelector():
    """
    Decide which set of trained parameters is kept as the final network.
    """

    def __init__(self):
        # Best parameters seen so far, the iteration they were recorded at,
        # and the network they belong to (set via init()).
        self.best_params = None
        self.best_iter = None
        self.net = None

    def init(self, net):
        self.net = net

    def update(self, iteration, monitored_value):
        """Record the current state if it is the best so far (subclasses)."""
        raise NotImplementedError

    def update_network(self):
        # Restore the best recorded parameters into the network.
        self.net.import_params(self.best_params)
class ParamSelectorBestMonitoredValue(ParamSelector):
    """
    The final network is the one with the best monitored value.
    """
    def __init__(self, monitor):
        ParamSelector.__init__(self)
        # Register this selector on the monitor so update() gets called
        # whenever a new monitored value is available.
        monitor.set_param_selector(self)
        self.monitor = monitor
        # Probe the monitor's ordering to initialize the running best:
        # if higher values are better start at -inf, otherwise at +inf.
        if self.monitor.is_a_better_than_b(2,1):
            self.best_monitored_value = -np.inf
        else:
            self.best_monitored_value = np.inf
def update(self, iteration, monitored_value):
if self.monitor.is_a_better_than_b(monitored_value, self.best_monitored_value):
self.best_monitored_value = monitored_value
self.best_params = self.net.export_params()
self.best_iter = iteration | {
"repo_name": "adbrebs/spynet",
"path": "training/parameters_selector.py",
"copies": "1",
"size": "1213",
"license": "bsd-2-clause",
"hash": -5392325664208795000,
"line_mean": 27.2325581395,
"line_max": 87,
"alpha_frac": 0.6248969497,
"autogenerated": false,
"ratio": 3.8753993610223643,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9992123902460289,
"avg_score": 0.001634481652415028,
"num_lines": 43
} |
__author__ = 'adeb'
import numpy as np
class StoppingCriterion():
    """
    Abstract class defining a stopping criterion for the trainer.
    """

    def __init__(self):
        pass

    def init(self):
        # Reset any internal state; nothing to do by default.
        pass

    def check_if_stop(self, epoch, minibatch_idx, id_minibatch, verbose=True):
        """
        Return True when the criterion is triggered and training should stop.
        """
        raise NotImplementedError
class MaxEpoch(StoppingCriterion):
    """
    Stopping criterion that triggers when a maximal number of epochs is
    reached.
    """

    def __init__(self, max_epoch):
        StoppingCriterion.__init__(self)
        self.max_epoch = max_epoch

    def check_if_stop(self, epoch, minibatch_idx, id_minibatch, verbose=True):
        # Keep training while below the limit.
        if epoch < self.max_epoch:
            return False
        if verbose:
            print("Stopping criterion triggered: maximum number of epoch reached")
        return True
class EarlyStopping(StoppingCriterion):
    """
    Stopping criterion monitoring a monitor. When the monitored value
    changes, the monitor updates the EarlyStopping object; training stops
    once the patience (in epochs) runs out without sufficient improvement.
    """
    def __init__(self, monitor, patience_increase=5, improvement_threshold=0.99, initial_patience=5):
        StoppingCriterion.__init__(self)
        self.monitor = monitor
        # How many extra epochs each significant improvement buys.
        self.patience_increase = patience_increase
        # Relative improvement factor required to count as significant.
        self.improvement_threshold = improvement_threshold
        self.initial_patience = initial_patience
        self.patience = None
        self.stopping = None
        self.best_monitor_value = None
        self.init()

    def init(self):
        """Reset the criterion state and (re-)register on the monitor."""
        self.patience = self.initial_patience
        # Link the monitor to the stopping criterion
        self.monitor.add_stopping_criteria([self])
        # Save the best monitored value; probe the monitor's ordering to
        # know whether higher or lower values are better.
        if self.monitor.is_a_better_than_b(2,1):
            self.best_monitor_value = -np.inf
        else:
            self.best_monitor_value = np.inf
        # Indicates if the stopping criterion is triggered or not
        self.stopping = False

    def update(self, epoch, minibatch_idx, id_minibatch, verbose):
        """
        Called by the Monitor object that this stopping criterion watches.
        """
        ### Triggered: patience exhausted
        if self.patience <= epoch:
            self.stopping = True
            return
        ### Not triggered yet
        # Fetch the latest monitored value
        (id_monitoring, monitored_value) = (self.monitor.history_minibatch[-1], self.monitor.history_value[-1])
        if not self.monitor.is_a_better_than_b(monitored_value, self.best_monitor_value, self.improvement_threshold):
            return
        # Increase the patience if the value has sufficiently improved
        self.patience = epoch + self.patience_increase
        print(" patience increased")
        # save the monitored value and the corresponding parameters of the network
        self.best_monitor_value = monitored_value
def check_if_stop(self, epoch, minibatch_idx, id_minibatch, verbose=True):
# In case we stop, loads the best parameters
if self.stopping and verbose:
print("Stopping criterion triggered: out of patience")
return self.stopping | {
"repo_name": "adbrebs/spynet",
"path": "training/stopping_criterion.py",
"copies": "1",
"size": "3216",
"license": "bsd-2-clause",
"hash": -8398472261639024000,
"line_mean": 30.5392156863,
"line_max": 117,
"alpha_frac": 0.6380597015,
"autogenerated": false,
"ratio": 4.091603053435114,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5229662754935114,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeb'
import numpy as np
def create_pick_features(config):
    """
    Factory function: build a PickComposed aggregating the feature pickers
    described by each dictionary of config.pick_features.
    """
    collected = []
    for feature_cfg in config.pick_features:
        collected.extend(create_pick_features_from_dict(feature_cfg))
    return PickComposed(collected)
def create_pick_features_from_dict(pick_feature_dictionary):
    """
    Factory function to create the objects responsible for picking the patches.

    Returns a list of PickFeatures built from one configuration dictionary;
    the "how" key selects the feature family.

    NOTE(review): on an unknown "how" value this prints an error and returns
    None; callers must handle a None result.
    """
    ls_pick_features = []
    how = pick_feature_dictionary["how"]
    ## Patch-based features
    if how == "3D":
        patch_width = pick_feature_dictionary["patch_width"]
        scale = pick_feature_dictionary["scale"]
        ls_pick_features.append(PickPatch3D(patch_width, scale))
    elif how == "2Dortho":
        # One 2D picker per requested orthogonal axis.
        ls_axis = pick_feature_dictionary["axis"]
        patch_width = pick_feature_dictionary["patch_width"]
        scale = pick_feature_dictionary["scale"]
        for axis in ls_axis:
            ls_pick_features.append(PickPatch2D(patch_width, axis, scale))
    # elif how_patch == "2DorthoRotated":
    #     axis = config_ini.pick_patch["axis"]
    #     max_degree_rotation = config_ini.pick_patch["max_degree_rotation"]
    #     pick_patch = PickPatchSlightlyRotated(patch_width, axis, max_degree_rotation)
    # elif how == "grid_patches":
    #     patch_width = pick_feature_dictionary["patch_width"]
    #     ls_pick_features.append(PickLocalGridOfPatches(patch_width))
    ## Geometric features
    elif how == "centroid":
        n_features = pick_feature_dictionary["n_features"]
        ls_pick_features.append(PickCentroidDistances(n_features))
    elif how == "xyz":
        ls_pick_features.append(PickXYZ())
    else:
        print "pick_features not specified"
        return
    return ls_pick_features
class PickFeatures():
    """
    Manage the selection and extraction of patches in an mri image from
    their central voxels.
    """

    def __init__(self, n_features, required_pad=0, n_types_of_features=1):
        # Number of scalar features produced per voxel.
        self.n_features = n_features
        # Padding the mri volume must provide so extraction stays in bounds.
        self.required_pad = required_pad
        # Number of homogeneous feature groups this picker produces.
        self.n_types_of_features = n_types_of_features

    def pick(self, vx, mri, label, region_centroids=None):
        """
        Return a tuple (features, ...): the extracted feature array first,
        optionally followed by additional information.
        """
        raise NotImplementedError

    def has_instance_of(self, class_object):
        # Composite pickers override this to look inside their children.
        return isinstance(self, class_object)
class PickXYZ(PickFeatures):
    """
    Use the voxel coordinates themselves as features.
    """

    def __init__(self):
        # Three features: the x, y, z coordinates.
        PickFeatures.__init__(self, 3)

    def pick(self, vx, mri, label, region_centroids=None):
        # The coordinates already are the features; no extra information.
        return vx, None
class PickCentroidDistances(PickFeatures):
    """
    Features are the scaled distances from each voxel to region centroids.
    """
    def __init__(self, n_features):
        PickFeatures.__init__(self, n_features)

    def pick(self, vx, mri, label, region_centroids=None):
        """Return (distances, None) for the given voxels."""
        n_points = vx.shape[0]
        distances = np.zeros((n_points, self.n_features))
        for i in xrange(n_points):
            # region_centroids is required here despite the None default.
            distances[i] = region_centroids.compute_scaled_distances(vx[i])
        return distances, None
class PickPatch(PickFeatures):
    """
    Base class for patch features: extract a (possibly scaled) window around
    each voxel and average it back down to patch_width.
    """
    def __init__(self, n_in, patch_width, scale=1):
        PickFeatures.__init__(self, n_in)
        self.patch_width = patch_width
        self.scale = scale
        # Padding needed so a scaled window around any voxel stays in bounds.
        # NOTE: '/' is integer division under Python 2; use '//' if porting.
        self.required_pad = 1 + patch_width * scale / 2

    def pick(self, vx, mri, label, region_centroids=None):
        """
        Extract one averaged patch per voxel; returns (patches, [idx_patch]).

        NOTE(review): idx_patch is allocated but never filled here — confirm
        whether consumers rely on its contents.
        """
        n_vx = vx.shape[0]
        idx_patch = np.zeros((n_vx, self.n_features), dtype=int)
        patch = np.zeros((n_vx, self.n_features), dtype=np.float32)
        for i in xrange(n_vx):
            # Extract the scaled window then average it down to patch_width.
            patch_temp = self.extract_patch(mri, vx[i], self.scale * self.patch_width)
            patch_temp = self.rebin(patch_temp, (self.patch_width,) * len(patch_temp.shape))
            patch[i, :] = patch_temp.ravel()
        return patch, [idx_patch]

    def extract_patch(self, mri, vx, patch_width):
        """Return the raw window around voxel vx (subclass responsibility)."""
        raise NotImplementedError

    def rebin(self, patch, new_shape):
        """
        Convert patch into a new patch of shape new_shape by averaging the pixels.
        """
        ls_sh = []
        for sh_old, sh_new in zip(patch.shape, new_shape):
            # Split each axis into (new size, reduction factor).
            ls_sh.extend([sh_new, sh_old//sh_new])
        patch_temp = patch.reshape(tuple(ls_sh))
        for i in xrange(len(new_shape)):
            # Average out each reduction axis; after each mean the next
            # reduction axis sits at position i+1.
            patch_temp = patch_temp.mean(i+1)
        return patch_temp
class PickPatch2D(PickPatch):
    """
    Pick a 2D patch centered on the voxels. The final patch has a width of
    patch_width but captures a window of patch_width * scale width which is
    averaged.
    """

    def __init__(self, patch_width, orthogonal_axis, scale=1):
        PickPatch.__init__(self, patch_width**2, patch_width, scale)
        self.orthogonal_axis = orthogonal_axis
        # The two axes spanning the extracted plane.
        # FIX: wrap in list() so deletion works on Python 3 as well
        # (identical behavior under Python 2, where range() is a list).
        self.parallel_axis = list(range(3))
        del self.parallel_axis[self.orthogonal_axis]

    def extract_patch(self, mri, single_vx, patch_width):
        """Extract the 2D window of width patch_width centred on single_vx."""
        s = [slice(None)]*3
        s[self.orthogonal_axis] = single_vx[self.orthogonal_axis]
        # BUG FIX: list-based multi-axis indexing is deprecated/removed in
        # modern numpy; use a tuple (same result on old versions).
        mri_slice = mri[tuple(s)]
        vx_slice = single_vx[self.parallel_axis]
        # BUG FIX: floor division keeps the radius integral (identical under
        # Python 2, correct under Python 3 where '/' yields a float).
        radius = patch_width // 2
        return mri_slice[vx_slice[0] - radius:vx_slice[0] + radius + 1,
                         vx_slice[1] - radius:vx_slice[1] + radius + 1]
class PickPatch3D(PickPatch):
    """
    Pick a cubic 3D patch centered on the voxels, clamping coordinates to
    the volume bounds (edge voxels are repeated).
    """

    def __init__(self, patch_width, scale=1):
        PickPatch.__init__(self, patch_width**3, patch_width, scale)

    def extract_patch(self, mri, single_vx, patch_width):
        """
        Extract the cubic window centred on single_vx.

        NOTE(review): np.meshgrid defaults to 'xy' indexing, which swaps the
        first two axes of the produced grids; if an axis-aligned patch is
        expected, indexing='ij' should be used — confirm with consumers
        before changing.
        """
        dims = mri.shape
        # BUG FIX: floor division keeps the radius integral (identical under
        # Python 2, correct under Python 3 where '/' yields a float).
        radius = self.patch_width // 2

        def crop(j, voxel):
            # Coordinate range along axis j, clamped inside the volume.
            v = np.arange(voxel[j] - radius, voxel[j] + radius + 1)
            v[v < 0] = 0
            v[v >= dims[j]] = dims[j]-1
            return v

        v_axis = [crop(ax, single_vx) for ax in range(3)]
        x, y, z = np.meshgrid(v_axis[0], v_axis[1], v_axis[2])
        patch = mri[x, y, z]
        return patch
class PickComposed(PickFeatures):
    """
    PickFeatures subclass composed of a list of PickFeatures objects. Method
    pick returns an array of features. This array is the concatenation of
    the features arrays picked by each PickFeatures object.

    Attributes:
        ls_pick_features (list of PickFeatures objects): list containing the PickFeatures
        ls_slices_different_features (list of slices): slices corresponding to each set of homogeneous features
    """
    def __init__(self, ls_pick_patch):
        self.ls_pick_features = ls_pick_patch
        self.ls_slices_different_features = []
        n_features = 0
        required_pad = 0
        c = 0
        for pick_patch in ls_pick_patch:
            # Record where this picker's features live in the final array.
            self.ls_slices_different_features.append(slice(c, c+pick_patch.n_features))
            c += pick_patch.n_features
            n_features += pick_patch.n_features
            # The composite needs the largest padding of its children.
            if required_pad < pick_patch.required_pad:
                required_pad = pick_patch.required_pad
        PickFeatures.__init__(self, n_features, required_pad, len(ls_pick_patch))

    def pick(self, vx, mri, label, region_centroids=None):
        """Concatenate the features picked by each child picker."""
        n_vx = vx.shape[0]
        patch = np.zeros((n_vx, self.n_features), dtype=np.float32)
        ls_extra_info = []
        for slice_features, pick_patch in zip(self.ls_slices_different_features, self.ls_pick_features):
            res = pick_patch.pick(vx, mri, label, region_centroids)
            patch[:, slice_features] = res[0]
            # NOTE(review): this condition looks inverted — it stores None
            # when extra info IS present and (None,) when it is absent.
            # Confirm against consumers of ls_extra_info before changing.
            if res[1] is not None:
                ls_extra_info.append(None)
            else:
                ls_extra_info.append(res[1:])
        return patch, ls_extra_info

    def has_instance_of(self, class_object):
        # Look through the children instead of the composite itself.
        for pick_patch in self.ls_pick_features:
            if isinstance(pick_patch, class_object):
                return True
        return False

    def __iter__(self):
        return self.ls_pick_features.__iter__()
def next(self):
return self.ls_pick_features.next() | {
"repo_name": "adbrebs/spynet",
"path": "data/utils_3d/pick_patch.py",
"copies": "1",
"size": "8132",
"license": "bsd-2-clause",
"hash": -4382058955453157400,
"line_mean": 34.3608695652,
"line_max": 118,
"alpha_frac": 0.6159616331,
"autogenerated": false,
"ratio": 3.5713658322353976,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9681173110888328,
"avg_score": 0.0012308708894138993,
"num_lines": 230
} |
__author__ = 'adeb'
import numpy as np
def create_pick_target(config_ini):
    """
    Factory function creating the object responsible for picking the targets.

    Args:
        config_ini: configuration object whose pick_tg["how"] entry selects
            the target-picking strategy (currently only "center").
    Returns:
        PickTarget: the configured target picker.
    Raises:
        ValueError: if pick_tg["how"] is not a known strategy.
            (BUG FIX: the original printed an error and implicitly returned
            None, which made callers fail later with an obscure error.)
    """
    how_tg = config_ini.pick_tg["how"]
    if how_tg == "center":
        return PickTgCentered()
    # elif how_tg == "proportion":
    #     return PickTgProportion()
    raise ValueError("error in pick_tg: unknown strategy {}".format(how_tg))
class PickTarget():
    """
    Abstract base class managing the labelling of the patches.
    Subclasses implement pick_virtual to fill the target array in place.
    """
    def __init__(self):
        pass
    def pick(self, vx, n_classes, mri, label):
        """Return a (n_voxels, n_classes) float32 target array filled by the subclass."""
        targets = np.zeros((vx.shape[0], n_classes), dtype=np.float32)
        self.pick_virtual(targets, vx, n_classes, mri, label)
        return targets
    def pick_virtual(self, tg, vx, n_classes, mri, label):
        """Fill tg in place; must be implemented by subclasses."""
        raise NotImplementedError
class PickTgCentered(PickTarget):
    """
    The label of each patch is the label of the central voxel of the patch.
    """
    def __init__(self):
        PickTarget.__init__(self)
    def pick_virtual(self, tg, vx, n_classes, mri, label):
        # One-hot encode the label of each patch's central voxel.
        # (Explicit per-axis indexing, equivalent to the historical
        # list-of-arrays index which numpy used to interpret as a tuple.)
        rows = np.arange(tg.shape[0])
        central_labels = label[vx[:, 0], vx[:, 1], vx[:, 2]]
        tg[rows, central_labels] = 1
# class PickTgProportion(PickTarget):
# """
# For each patch, the target is the vector of proportions of each class in the patch
# """
# def __init__(self):
# PickTarget.__init__(self)
#
# def pick_virtual(self, tg, vx, idx_patch, n_classes, mri, label):
# lab_flat = label.ravel()
# for i in xrange(vx.shape[0]):
# a = np.bincount(lab_flat[idx_patch[i]])
# b = np.nonzero(a)[0]
# c = a[b].astype(float, copy=False)
# c = c / sum(c)
# tg[i, b] = c | {
"repo_name": "adbrebs/spynet",
"path": "data/utils_3d/pick_target.py",
"copies": "1",
"size": "1692",
"license": "bsd-2-clause",
"hash": -8568619224250286000,
"line_mean": 25.873015873,
"line_max": 88,
"alpha_frac": 0.5644208038,
"autogenerated": false,
"ratio": 3.2413793103448274,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9301855868627549,
"avg_score": 0.000788849103455845,
"num_lines": 63
} |
__author__ = 'adeb'
import sys
import h5py
from spynet.utils.utilities import get_h5file_attribute, error_rate
from spynet.models.layer import *
from spynet.models.layer_block import *
from spynet.models import neuron_type
class Network(object):
    """
    Abstract class whose child classes define custom user networks.
    Attributes:
        n_in (int): number of inputs of the network
        n_out (int): Number of outputs of the network
        ls_layers (list): list of the layers composing the network
        ls_params (list): list of arrays of parameters of all the layers
    """
    def __init__(self):
        # The network name defaults to the child class name; it appears in
        # __str__ and is stored in saved parameter files.
        self.name = self.__class__.__name__
        self.n_in = None
        self.n_out = None
        self.ls_layers = []
        self.ls_params = []
    def init_common(self, n_in, n_out):
        """Record the input/output sizes; called by the child classes' init()."""
        print "Initialize the model ..."
        self.n_in = n_in
        self.n_out = n_out
    def concatenate_parameters(self):
        """Rebuild the flat parameter list from all layers."""
        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
    def forward(self, in_batch, batch_size, run_time):
        """Return the output of the network
        Args:
            in_batch (theano.tensor.TensorType): input batch of the network
        Returns:
            (theano.tensor.TensorType): outputs of the network
        """
        # Each layer consumes and produces a LIST of tensors; the final layer
        # is expected to output a single tensor.
        out_batch = [in_batch]
        for l in self.ls_layers:
            out_batch = l.forward(out_batch, batch_size, run_time)
        return out_batch[0]
    def generate_testing_function(self, batch_size):
        """
        Generate a C-compiled function that can be used to compute the output of the network from an input batch
        Args:
            batch_size (int): the input of the returned function will be a batch of batch_size elements
        Returns:
            (function): function that returns the output of the network for a given input batch
        """
        in_batch = T.matrix('in_batch') # Minibatch input matrix
        y_pred = self.forward(in_batch, batch_size, run_time=True) # Output of the network
        return theano.function([in_batch], y_pred)
    def predict(self, in_numpy_array, batch_size_limit):
        """
        User-friendly function to return the outputs of provided inputs without worrying about batch_size.
        Args:
            in_numpy_array (2D array): dataset in which rows are datapoints
            batch_size_limit (int): limit size of a batch (should be what the GPU memory can support (or the RAM))
        Returns:
            pred (2D array): outputs of the network for the given inputs
        """
        n_inputs = in_numpy_array.shape[0]
        out_pred = np.zeros((n_inputs, self.n_out), dtype=np.float32) # Will store the output predictions
        batch_size = min(batch_size_limit, n_inputs)
        pred_fun = self.generate_testing_function(batch_size)
        # Full batches first; the leftover datapoints are handled below with a
        # second compiled function of the exact remainder size.
        n_batches, n_rest = divmod(n_inputs, batch_size)
        print "--------------------"
        for b in xrange(n_batches):
            # Progress display (Python 2 integer division for the percentage).
            sys.stdout.write("\r Prediction: {}%".format(100*b/n_batches))
            sys.stdout.flush()
            id0 = b*batch_size
            id1 = id0 + batch_size
            out_pred[id0:id1] = pred_fun(in_numpy_array[id0:id1])
        if n_rest > 0:
            pred_fun_res = self.generate_testing_function(n_rest)
            out_pred[n_batches*batch_size:] = pred_fun_res(in_numpy_array[n_batches*batch_size:])
        return out_pred
    def predict_from_generator(self, batches_generator, scaler, pred_functions=None):
        """
        Returns the predictions of the batches of voxels, features and targets yielded by the batches_generator
        """
        if pred_functions is None:
            pred_functions = {}
        ls_vx = []
        ls_pred = []
        id_batch = 0
        for vx_batch, patch_batch, tg_batch in batches_generator:
            id_batch += 1
            batch_size_current = len(vx_batch)
            # Compiled prediction functions are cached per batch size.
            if batch_size_current not in pred_functions:
                pred_functions[batch_size_current] = self.generate_testing_function(batch_size_current)
            # NOTE: scaling is applied in place on patch_batch.
            if scaler is not None:
                scaler.scale(patch_batch)
            pred_raw = pred_functions[batch_size_current](patch_batch)
            pred = np.argmax(pred_raw, axis=1)
            err = error_rate(pred, np.argmax(tg_batch, axis=1))
            print " {}".format(err)
            ls_vx.append(vx_batch)
            ls_pred.append(pred)
        # Count the number of voxels
        n_vx = 0
        for vx in ls_vx:
            n_vx += vx.shape[0]
        # Aggregate the data
        vx_all = np.zeros((n_vx, 3), dtype=int)
        pred_all = np.zeros((n_vx,), dtype=int)
        idx = 0
        for vx, pred in zip(ls_vx, ls_pred):
            next_idx = idx+vx.shape[0]
            vx_all[idx:next_idx] = vx
            pred_all[idx:next_idx] = pred
            idx = next_idx
        return vx_all, pred_all
    def save_parameters(self, file_path):
        """
        Save parameters (weights, biases, scaling info) of the network in an hdf5 file
        """
        f = h5py.File(file_path, "w")
        f.attrs['network_type'] = self.__class__.__name__
        f.attrs['n_in'] = self.n_in
        f.attrs['n_out'] = self.n_out
        self.save_parameters_virtual(f)
        for i, l in enumerate(self.ls_layers):
            l.save_parameters(f, "layer" + str(i))
        f.close()
    def save_parameters_virtual(self, h5file):
        """Hook for subclasses to save their extra attributes."""
        raise NotImplementedError
    def load_parameters(self, h5file):
        """
        Load parameters (weights, biases, scaling info) of the network from an hdf5 file.
        Whether the layers are re-created or only updated is decided by the
        subclass in load_parameters_virtual.
        """
        self.n_in = int(get_h5file_attribute(h5file, "n_in"))
        self.n_out = int(get_h5file_attribute(h5file, "n_out"))
        self.load_parameters_virtual(h5file)
        for i, l in enumerate(self.ls_layers):
            l.load_parameters(h5file, "layer" + str(i))
    def load_parameters_virtual(self, h5file):
        """Hook for subclasses to load their extra attributes."""
        raise NotImplementedError
    def __str__(self):
        n_parameters = 0
        for p in self.ls_params:
            n_parameters += p.get_value().size
        msg = "This network has the following layers: \n"
        for i, l in enumerate(self.ls_layers):
            msg += "------- Layer {} ------- \n".format(i)
            msg += l.__str__()
        msg += "The type of this network is {}. It has {} inputs, {} outputs and {} parameters \n"\
            .format(self.name, self.n_in, self.n_out, n_parameters)
        return msg
    def export_params(self):
        """
        Return the real value of Theano shared variables params.
        """
        params = []
        for p in self.ls_params:
            params.append(p.get_value())
        return params
    def import_params(self, params):
        """
        Update Theano shared variable self.params with numpy variable params.
        """
        for p, p_sym in zip(params, self.ls_params):
            p_sym.set_value(p, borrow=True)
    def get_layer(self, idx_layer):
        """Return the layer at index idx_layer."""
        return self.ls_layers[idx_layer]
    def update_params(self):
        """Refresh the parameter list from the layers, removing duplicates."""
        self.ls_params = []
        for l in self.ls_layers:
            l.update_params()
            self.ls_params += l.params
        # Check that there no duplicates (if layers share weights) (TODO: should be put in a set since the beginning)
        self.ls_params = list(set(self.ls_params))
class MLP(Network):
    """
    Multi-layer perceptron: a stack of fully-connected blocks with an optional
    dropout block after each hidden layer and a softmax output layer.
    """
    def __init__(self):
        Network.__init__(self)
    def init(self, ls_layer_size, dropout=False, dropout_p=None, neuron_function=neuron_type.NeuronRELU()):
        """
        Args:
            ls_layer_size (list of int): layer sizes, input first, output last
            dropout (bool): if True, insert a Bernoulli dropout block after each hidden layer
            dropout_p (list of float): Bernoulli probabilities, one per hidden layer
            neuron_function (NeuronType): activation of the hidden layers
        """
        Network.init_common(self, ls_layer_size[0], ls_layer_size[-1])
        ls_block = []
        n_hidden = len(ls_layer_size) - 2
        for i in xrange(n_hidden):
            ls_block.append(LayerBlockFullyConnected(neuron_function, ls_layer_size[i], ls_layer_size[i+1]))
            if dropout:
                ls_block.append(LayerBlockNoiseDropoutBernoulli(dropout_p[i]))
        # The output layer is a softmax over the classes.
        ls_block.append(LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), ls_layer_size[-2], ls_layer_size[-1]))
        self.ls_layers = convert_blocks_into_feed_forward_layers(ls_block)
        self.concatenate_parameters()
    def save_parameters_virtual(self, h5file):
        # No extra attributes beyond what Network already saves.
        pass
    def load_parameters_virtual(self, h5file):
        # This function is not working properly, see issue 2 on GitHub
        pass
class AutoEncoder(Network):
    """
    Autoencoder: a stack of fully-connected blocks (optionally with dropout)
    whose output layer uses the same activation as the hidden layers.
    """
    def __init__(self):
        Network.__init__(self)
    def init(self, ls_layer_size, dropout=False, dropout_p=None, neuron_function=neuron_type.NeuronRELU()):
        """
        Args:
            ls_layer_size (list of int): layer sizes, input first, output last
            dropout (bool): if True, insert a Bernoulli dropout block after each hidden layer
            dropout_p (list of float): Bernoulli probabilities, one per hidden layer
            neuron_function (NeuronType): activation used by all layers
        """
        Network.init_common(self, ls_layer_size[0], ls_layer_size[-1])
        ls_block = []
        n_hidden = len(ls_layer_size) - 2
        for i in xrange(n_hidden):
            ls_block.append(LayerBlockFullyConnected(neuron_function, ls_layer_size[i], ls_layer_size[i+1]))
            if dropout:
                ls_block.append(LayerBlockNoiseDropoutBernoulli(dropout_p[i]))
        # The output layer reuses neuron_function (the original comment saying
        # "softmax" did not match the code).
        ls_block.append(LayerBlockFullyConnected(neuron_function, ls_layer_size[-2], ls_layer_size[-1]))
        self.ls_layers = convert_blocks_into_feed_forward_layers(ls_block)
        self.concatenate_parameters()
    def save_parameters_virtual(self, h5file):
        # No extra attributes beyond what Network already saves.
        pass
    def load_parameters_virtual(self, h5file):
        # This function is not working properly, see issue 2 on GitHub
        pass
class ConvNet2DExample(Network):
    """
    Example 2D convnet: two conv+maxpool layers followed by a fully-connected
    hidden layer and a softmax output layer.
    Attributes:
        in_height (int): height of the input patches
        in_width (int): width of the input patches
    """
    def __init__(self):
        Network.__init__(self)
        self.in_width = None
        self.in_height = None
    def init(self, patch_height, patch_width, n_out):
        """Build the layers for patch_height x patch_width patches and n_out classes."""
        Network.init_common(self, patch_height*patch_width, n_out)
        self.in_height = patch_height
        self.in_width = patch_width
        neuron_relu = neuron_type.NeuronRELU()
        # Layer 0: convolution + max-pooling
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_height, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
                                      poolsize=(pool_size_height0, pool_size_width0))
        # Layer 1: convolution + max-pooling on the previous feature maps.
        # NOTE: integer (floor) division — patch sizes must be compatible with
        # the kernel/pooling sizes for the shapes to be exact.
        filter_map_height1 = (patch_height - kernel_height0 + 1) / pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) / pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block1 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        # Layer 2: fully-connected hidden layer on the flattened feature maps
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) / pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) / pool_size_width1
        n_in2 = n_kern1 * filter_map_height2 * filter_map_width2
        n_out2 = 500
        block2 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)
        # Layer 3: softmax output layer
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out2, n_out=self.n_out)
        self.ls_layers = convert_blocks_into_feed_forward_layers([block0, block1, block2, block3])
        # Consistency fix: use the shared helper instead of duplicating the
        # parameter-concatenation loop (behavior is identical).
        self.concatenate_parameters()
    def save_parameters_virtual(self, h5file):
        h5file.attrs['in_height'] = self.in_height
        h5file.attrs['in_width'] = self.in_width
    def load_parameters_virtual(self, h5file):
        # Rebuild the layers so that Network.load_parameters can fill them in.
        self.in_height = int(h5file.attrs["in_height"])
        self.in_width = int(h5file.attrs["in_width"])
        self.init(self.in_height, self.in_width, self.n_out)
class ConvNet3DExample(Network):
    """
    Example 3D convnet: two 3D conv+maxpool layers followed by a
    fully-connected hidden layer and a softmax output layer.
    Attributes:
        in_height, in_width, in_depth (int): dimensions of the input patches
    """
    def __init__(self):
        Network.__init__(self)
        self.in_height = None
        self.in_width = None
        self.in_depth = None
    def init(self, patch_height, patch_width, patch_depth, n_out):
        """Build the layers for patch_height x patch_width x patch_depth patches and n_out classes."""
        Network.init_common(self, patch_height*patch_width*patch_depth, n_out)
        self.in_height = patch_height
        self.in_width = patch_width
        self.in_depth = patch_depth
        neuron_relu = neuron_type.NeuronRELU()
        # Layer 0: 3D convolution + max-pooling
        filter_map_0_shape = np.array([patch_height, patch_width, patch_depth], dtype=int)
        filter_0_shape = np.array([2, 2, 2], dtype=int)
        pool_0_shape = np.array([2, 2, 2], dtype=int)
        n_kern0 = 20
        block0 = LayerBlockConvPool3D(neuron_relu,
                                      1, tuple(filter_map_0_shape),
                                      n_kern0, tuple(filter_0_shape),
                                      poolsize=tuple(pool_0_shape))
        # Layer 1: 3D convolution + max-pooling (floor division on the shapes)
        filter_map_1_shape = (filter_map_0_shape - filter_0_shape + 1) / pool_0_shape
        filter_1_shape = np.array([2, 2, 2], dtype=int)
        pool_1_shape = np.array([2, 2, 2], dtype=int)
        n_kern1 = 50
        block1 = LayerBlockConvPool3D(neuron_relu,
                                      n_kern0, tuple(filter_map_1_shape),
                                      n_kern1, tuple(filter_1_shape),
                                      poolsize=tuple(pool_1_shape))
        # Layer 2: fully-connected hidden layer on the flattened feature maps
        filter_map_2_shape = (filter_map_1_shape - filter_1_shape + 1) / pool_1_shape
        n_in2 = n_kern1 * np.prod(filter_map_2_shape)
        n_out2 = 500
        block2 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)
        # Layer 3: softmax output layer
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out2, n_out=self.n_out)
        self.ls_layers = convert_blocks_into_feed_forward_layers([block0, block1, block2, block3])
        # Consistency fix: use the shared helper instead of duplicating the
        # parameter-concatenation loop (behavior is identical).
        self.concatenate_parameters()
    def save_parameters_virtual(self, h5file):
        h5file.attrs['in_height'] = self.in_height
        h5file.attrs['in_width'] = self.in_width
        h5file.attrs['in_depth'] = self.in_depth
    def load_parameters_virtual(self, h5file):
        self.in_height = int(h5file.attrs["in_height"])
        self.in_width = int(h5file.attrs["in_width"])
        self.in_depth = int(h5file.attrs["in_depth"])
        # BUG FIX: unlike ConvNet2DExample, the layers were never rebuilt here,
        # so Network.load_parameters had no layers to load parameters into.
        self.init(self.in_height, self.in_width, self.in_depth, self.n_out)
| {
"repo_name": "adbrebs/spynet",
"path": "models/network.py",
"copies": "1",
"size": "14714",
"license": "bsd-2-clause",
"hash": 5436671487758694000,
"line_mean": 35.3308641975,
"line_max": 117,
"alpha_frac": 0.5813510942,
"autogenerated": false,
"ratio": 3.5609874152952568,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9635335305575656,
"avg_score": 0.0014006407839202574,
"num_lines": 405
} |
__author__ = 'adeb'
import theano.tensor as T
class Layer():
    """
    Abstract layer of a neural network. A spynet layer is more general than the
    usual notion of a layer of neurons: it need not contain neurons at all —
    child classes may simply merge or divide their inputs.
    """
    def __init__(self):
        # Theano shared parameters exposed to the trainer (empty by default).
        self.params = []
    def forward(self, ls_input, batch_size, run_time):
        """Return the output of the layer block
        Args:
            ls_input (list of theano.tensor.TensorType): input of the layer
        Returns:
            (list of theano.tensor.TensorType): output of the layer
        """
        raise NotImplementedError
    def save_parameters(self, h5file, name):
        """Save all parameters of the layer in a hdf5 file (no-op by default)."""
        pass
    def load_parameters(self, h5file, name):
        """Load all parameters of the layer from a hdf5 file (no-op by default)."""
        pass
    def update_params(self):
        """Refresh self.params (no-op for parameter-less layers)."""
        pass
    def __str__(self):
        """
        Should end with \n.
        """
        raise NotImplementedError
class LayerMergeFeatures(Layer):
    """
    Concatenate the output features of the previous layer's blocks into a
    single feature matrix.
    """
    def __init__(self):
        Layer.__init__(self)
    def forward(self, ls_inputs, batch_size, run_time):
        # Join all incoming feature matrices along the feature axis.
        merged = T.concatenate(ls_inputs, axis=1)
        return [merged]
    def __str__(self):
        return "Merging layer\n"
class LayerDivideFeatures(Layer):
    """
    Split the output features of the previous layer so that different blocks
    can be applied to different feature groups in the next layer.
    Attributes:
        ls_split_idx: boundary indices of the feature groups. For example,
            [0, 700, 3000] splits the features into [0, 699] and [700, 2999].
    """
    def __init__(self, ls_split_idx):
        Layer.__init__(self)
        self.ls_split_idx = ls_split_idx
    def forward(self, ls_inputs, batch_size, run_time):
        if len(ls_inputs) != 1:
            raise Exception("LayerDivide's input should be of length 1")
        full_input = ls_inputs[0]
        # Slice consecutive [start, stop) column ranges out of the single input.
        boundaries = zip(self.ls_split_idx[:-1], self.ls_split_idx[1:])
        return [full_input[:, start:stop] for start, stop in boundaries]
    def __str__(self):
        return "Dividing layer\n"
class LayerOfBlocks(Layer):
    """
    Layer composed of blocks of neurons. A LayerOfBlocks has the same meaning
    as a layer in the usual neural-network vocabulary.
    Attributes:
        ls_layer_blocks: List of LayerBlock objects
    """
    def __init__(self, ls_layer_blocks):
        Layer.__init__(self)
        self.ls_layer_blocks = ls_layer_blocks
        self.update_params()
    def forward(self, ls_inputs, batch_size, run_time):
        # Each block processes its own element of the incoming feature groups.
        return [block.forward(x, batch_size, run_time)
                for x, block in zip(ls_inputs, self.ls_layer_blocks)]
    def save_parameters(self, h5file, name):
        for i, block in enumerate(self.ls_layer_blocks):
            block.save_parameters(h5file, name + "/block" + str(i))
    def load_parameters(self, h5file, name):
        for i, block in enumerate(self.ls_layer_blocks):
            block.load_parameters(h5file, name + "/block" + str(i))
    def update_params(self):
        # Refresh the flat parameter list from the underlying blocks.
        self.params = []
        for block in self.ls_layer_blocks:
            block.update_params()
            self.params += block.params
    def __str__(self):
        msg = "Layer composed of the following block(s):\n"
        for i, block in enumerate(self.ls_layer_blocks):
            msg += "Block " + str(i) + ":\n" + block.__str__() + "\n"
        return msg
    def get_layer_block(self, idx_block):
        """Return the block at index idx_block."""
        return self.ls_layer_blocks[idx_block]
def convert_blocks_into_feed_forward_layers(ls_layer_blocks):
    """
    Convenient function to convert a list of layer blocks into a list of LayerOfBlocks, each LayerOfBlock containing a
    single block. It is useful when you don't need to divide the features of your data.
    """
    ls_layers = []
    for layer_block in ls_layer_blocks:
        ls_layers.append(LayerOfBlocks([layer_block]))
    # NOTE(review): the trailing "| {" below looks like corruption from the
    # surrounding data dump rather than original source text.
    return ls_layers | {
"repo_name": "adbrebs/spynet",
"path": "models/layer.py",
"copies": "1",
"size": "4423",
"license": "bsd-2-clause",
"hash": 764022436158097400,
"line_mean": 31.0579710145,
"line_max": 118,
"alpha_frac": 0.6084105811,
"autogenerated": false,
"ratio": 3.8030954428202923,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49115060239202923,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeb'
import time
import numpy as np
import theano
import theano.tensor as T
class Trainer():
    """
    Class that supervises the training of a neural network.
    Attributes:
        net (Network object): the network to be trained
        ds_training (Dataset object): the dataset on which the network is trained
        cost_function (CostFunction object): the cost function of the training
        batch_size (int): number of training datapoints to include in a training batch
        n_train_batches (int): number of batches that the dataset contains
        ls_monitors (list of Monitor objects): each monitor tracks a particular statistic of the training
        ls_stopping_criteria (list of StoppingCriterion objects): stopping criteria that decide when to
            stop the training
        train_minibatch (function): function to train the network on a single minibatch
    """
    def __init__(self, net, cost_function, params_selector, ls_stopping_criteria,
                 learning_update, ds_training, batch_size, ls_monitors):
        print 'Configure training ...'
        self.net = net
        self.cost_function = cost_function
        self.params_selector = params_selector
        self.ds_training = ds_training
        self.ls_monitors = ls_monitors
        self.ls_stopping_criteria = ls_stopping_criteria
        self.learning_update = learning_update
        self.batch_size = batch_size
        self.n_train_batches = None
        self.train_minibatch = None
        self.init()
    # Allow to reinitialize the object
    def init(self):
        """
        (Re)initialize monitors, stopping criteria and the parameter selector,
        then compile the theano function that trains on one minibatch.
        """
        # Integer (floor) division: a trailing incomplete batch is dropped.
        self.n_train_batches = self.ds_training.n_data / self.batch_size
        for monitor in self.ls_monitors:
            monitor.init(self)
        for stopping_crit in self.ls_stopping_criteria:
            stopping_crit.init()
        self.params_selector.init(self.net)
        # Minibatch input matrix
        in_batch = T.matrix('in_batch')
        # True output (target) of a minibatch
        tg_batch = T.matrix('tg_batch')
        # Predicted output of the network for an input batch
        pred_batch = self.net.forward(in_batch, self.batch_size, False)
        # Cost the trainer is going to minimize
        cost = self.cost_function.compute_cost_symb(pred_batch, tg_batch)
        # Compute gradients
        params = self.net.ls_params
        grads = T.grad(cost, params)
        # Compute updates
        updates = self.learning_update.compute_updates(params, grads)
        # The compiled function takes only the minibatch index; the data lives
        # in theano shared variables and is sliced on the device via "givens".
        idx_batch = T.lscalar()
        id1 = idx_batch * self.batch_size
        id2 = (idx_batch + 1) * self.batch_size
        in_train = self.ds_training.inputs_shared
        out_train = self.ds_training.outputs_shared
        self.train_minibatch = theano.function(
            inputs=[idx_batch],
            outputs=cost,
            updates=updates,
            givens={in_batch: in_train[id1:id2], tg_batch: out_train[id1:id2]})
    def check_if_stop(self, epoch, minibatch_idx, id_minibatch, verbose=True):
        """
        Check if the training should stop
        """
        for stopping_cri in self.ls_stopping_criteria:
            if stopping_cri.check_if_stop(epoch, minibatch_idx, id_minibatch, verbose):
                return True
        return False
    def record(self, epoch, epoch_minibatch, id_minibatch, force_record=False, update_stopping=True, verbose=True):
        """
        Record statistics about the training. Monitors that recorded a new
        value are printed when verbose is True. (Nothing is returned.)
        """
        updated_monitors = [] # memorize monitors that record a new value
        for i, monitor in enumerate(self.ls_monitors):
            has_monitored = monitor.record(epoch, epoch_minibatch, id_minibatch, force_record, update_stopping, verbose)
            if has_monitored:
                updated_monitors.append(i)
        if verbose and updated_monitors:
            print(" minibatch {}/{}:".format(epoch_minibatch, self.n_train_batches))
            for i in updated_monitors:
                print(" {}".format(self.ls_monitors[i].str_value_from_position(-1)))
    def train(self):
        """
        Run the training loop until a stopping criterion is met, then restore
        the parameters chosen by params_selector.
        """
        print "Train the network ..."
        start_time = time.clock()
        freq_display_batch = max(self.n_train_batches / 4, 1) # Frequency for printing the batch id
        epoch_id = minibatch_id = 0
        # Record statistics before training really starts
        # self.record(epoch_id, 0, minibatch_id)
        stop = False
        while not stop:
            starting_epoch_time = time.clock()
            epoch_id += 1
            print("Epoch {}".format(epoch_id))
            # NOTE: hard-coded learning-rate halving schedule at epochs 40/60/80.
            if epoch_id == 40:
                self.learning_update.learning_rate.set_value(self.learning_update.learning_rate.get_value()/2)
            if epoch_id == 60:
                self.learning_update.learning_rate.set_value(self.learning_update.learning_rate.get_value()/2)
            if epoch_id == 80:
                self.learning_update.learning_rate.set_value(self.learning_update.learning_rate.get_value()/2)
            for epoch_minibatch in xrange(1, 1+self.n_train_batches):
                minibatch_id += 1
                # Display minibatch number
                if epoch_minibatch % freq_display_batch == 0:
                    print(" minibatch {}/{}".format(epoch_minibatch, self.n_train_batches))
                # Train on the current minibatch
                if np.isnan(self.train_minibatch(epoch_minibatch-1)):
                    raise Exception("Error: The cost is Nan. Epoch {}, minibatch {}".format(epoch_id, epoch_minibatch))
                # Record statistics
                self.record(epoch_id, epoch_minibatch, minibatch_id)
                # Check if a stopping criterion is met
                if self.check_if_stop(epoch_id, 0, minibatch_id):
                    stop = True
                    break
            if not stop:
                print(" epoch {} finished after {} seconds".format(epoch_id, time.clock() - starting_epoch_time))
        end_time = time.clock()
        print ("Training ran for {} minutes".format((end_time - start_time) / 60.))
        # Update the network with the selected parameters
        self.params_selector.update_network()
        # Display monitored values for the best network
        self.record(None, None, self.params_selector.best_iter, force_record=True, update_stopping=False) | {
"repo_name": "adbrebs/spynet",
"path": "training/trainer.py",
"copies": "1",
"size": "6391",
"license": "bsd-2-clause",
"hash": 4634555981907628000,
"line_mean": 37.9756097561,
"line_max": 120,
"alpha_frac": 0.612893131,
"autogenerated": false,
"ratio": 4.104688503532434,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0013656785858653863,
"num_lines": 164
} |
__author__ = 'adeb'
from datetime import datetime
import h5py
import numpy as np
from spynet.utils.utilities import open_h5file, share
class Dataset(object):
    """
    Class to store a dataset composed of two 2D numpy arrays called respectively inputs and outputs.
    Their rows represent datapoints. inputs' columns represent the features of the datapoints. outputs' columns
    represent output variables. If the outputs are not known, set the variable to None.
    This class can also load the data on the GPU by transforming it into theano shared variables.
    You can load/save the dataset from/in hdf5 files.
    You can inherit from this class if you want to specify your own data attributes. If so, you may want to overwrite
    these methods: shuffle_data_virtual, write_virtual, read_virtual, duplicate_datapoints_slice_virtual
    Attributes:
        inputs (2D numpy array): rows represent datapoints and columns represent features
        outputs (2D numpy array): if known, corresponding outputs of the inputs. rows represent datapoints and columns
            represent output variables.
        n_in_features (int): number of input features
        n_out_features (int): number of output features
        n_data (int): number of datapoints
        is_perm (boolean): indicates if the dataset is shuffled or not
    """
    def __init__(self):
        self._inputs = None
        self._inputs_shared = None
        self.n_in_features = None
        self._outputs = None
        self._outputs_shared = None
        self.n_out_features = None
        self.n_data = None
        self.is_perm = False
        # Avoid sharing multiple times the variables (avoid copies in memory)
        self.shared_inputs_created = False
        self.shared_outputs_created = False
    @property
    def inputs(self):
        return self._inputs
    @inputs.setter
    def inputs(self, value):
        # Keep n_data and n_in_features in sync with the stored array.
        self._inputs = value
        self.n_data, self.n_in_features = value.shape
    @property
    def inputs_shared(self):
        # Create the theano shared variable lazily; afterwards only refresh its
        # value, to avoid duplicating the data in memory.
        if not self.shared_inputs_created:
            self._inputs_shared = share(self.inputs)
            self.shared_inputs_created = True
        else:
            self._inputs_shared.set_value(self.inputs, borrow=True)
        return self._inputs_shared
    @inputs_shared.setter
    def inputs_shared(self, value):
        raise Exception("The shared variable should not be replaced.")
    @property
    def outputs(self):
        return self._outputs
    @outputs.setter
    def outputs(self, value):
        self._outputs = value
        self.n_out_features = value.shape[1]
    @property
    def outputs_shared(self):
        if not self.shared_outputs_created:
            self._outputs_shared = share(self.outputs)
            self.shared_outputs_created = True
        else:
            self._outputs_shared.set_value(self.outputs, borrow=True)
        return self._outputs_shared
    @outputs_shared.setter
    def outputs_shared(self, value):
        raise Exception("The shared variable should not be replaced.")
    def shuffle_data(self):
        """
        Shuffle the dataset (inputs and outputs with the same permutation).
        """
        perm = np.random.permutation(self.n_data)
        self.inputs = self.inputs[perm]
        self.outputs = self.outputs[perm]
        self.shuffle_data_virtual(perm)
    def shuffle_data_virtual(self, perm):
        """
        Should be overwritten if you define a child class with attributes that need to be shuffled as well.
        """
        pass
    def write(self, file_path):
        """
        write the dataset in a hdf5 file.
        """
        h5file = h5py.File(file_path, "w")
        h5file.create_dataset("inputs", data=self.inputs, dtype='f')
        h5file.attrs['creation_date'] = str(datetime.now())
        h5file.attrs['n_data'] = self.n_data
        h5file.attrs['n_in_features'] = self.n_in_features
        h5file.attrs['is_perm'] = self.is_perm
        h5file.create_dataset("outputs", data=self.outputs, dtype='f')
        h5file.attrs['n_out_features'] = self.n_out_features
        self.write_virtual(h5file)
        h5file.close()
    def write_virtual(self, h5file):
        pass
    def read(self, file_path):
        """
        load the dataset from a hdf5 file.
        """
        h5file = open_h5file(file_path)
        # The inputs/outputs setters restore n_data and the feature counts.
        self.inputs = h5file["inputs"].value
        self.is_perm = bool(h5file.attrs['is_perm'])
        self.outputs = h5file["outputs"].value
        self.read_virtual(h5file)
        h5file.close()
    def read_virtual(self, h5file):
        pass
    def split_dataset_proportions(self, proportions):
        """
        Split the dataset into subdatasets
        Args:
            - proportions is the list of proportions of the resulting datasets. It should sum to (at most) 1.
              For example, it could be [0.85, 0.15].
        """
        proportions = np.asarray(proportions)
        splits = np.cumsum(proportions * self.n_data)
        splits = splits.astype(int)
        return self.split_dataset_indices(splits)
    def split_dataset_indices(self, indices):
        """
        Split a dataset in subdatasets according to a list of indices
        """
        if indices[0] < 0 or indices[-1] > self.n_data:
            raise Exception("the split indices should be between 0 and the size of the dataset")
        ds = []
        prev_idx = 0
        for i in indices:
            ds.append(self.duplicate_datapoints_slice(slice(prev_idx, i)))
            prev_idx = i
        return ds
    def duplicate_datapoints_slice(self, slice_idx):
        """
        Create a Dataset that correspond to a slice of the Dataset calling the function.
        """
        # type(self)() so that subclasses duplicate into the same class.
        ds = type(self)()
        ds.inputs = self.inputs[slice_idx]
        ds.outputs = self.outputs[slice_idx]
        self.duplicate_datapoints_slice_virtual(ds, slice_idx)
        return ds
    def duplicate_datapoints_slice_virtual(self, ds, slice_idx):
        """
        In case there are more attributes to slice.
        ds.your_attribute = self.your_attribute[slice_idx]
        """
        pass
    def add_features(self, data_to_add):
        """
        Add features (columns) to the current dataset.
        """
        assert self.inputs.shape[0] == data_to_add.shape[0]
        # BUG FIX: the inputs setter already refreshes n_in_features from the
        # concatenated array's shape; the original extra
        # "self.n_in_features += data_to_add.shape[1]" double-counted the
        # newly added columns.
        self.inputs = np.concatenate((self.inputs, data_to_add), axis=1)
    @staticmethod
    def create_and_read(file_name):
        """Convenience constructor: create a Dataset and load it from file_name."""
        ds = Dataset()
        ds.read(file_name)
        return ds
class Scaler():
    """
    Class responsible for scaling data. The slices of the data features are specified in the list ls_slices.
    The lists ls_slices, ls_means and ls_stds correspond.
    """
    def __init__(self, ls_slices):
        # Feature slices scaled independently of each other.
        self.ls_slices = ls_slices
        self.ls_means = []
        self.ls_stds = []
    def compute_parameters(self, data):
        """Compute and store per-column mean/std for each feature slice of data."""
        self.ls_means = []
        self.ls_stds = []
        for s in self.ls_slices:
            self.ls_means.append(data[:, s].mean(axis=0))
            self.ls_stds.append(data[:, s].std(axis=0))
    def scale(self, data):
        """Standardize data in place: subtract the mean, divide by non-zero stds."""
        for i, s in enumerate(self.ls_slices):
            # Columns with zero std are left undivided to avoid division by zero.
            zo = self.ls_stds[i] != 0
            data[:, s] -= self.ls_means[i]
            data[:, s][:, zo] /= self.ls_stds[i][zo] | {
"repo_name": "adbrebs/spynet",
"path": "data/dataset.py",
"copies": "1",
"size": "7275",
"license": "bsd-2-clause",
"hash": -1689261674922278400,
"line_mean": 31.3377777778,
"line_max": 119,
"alpha_frac": 0.6130584192,
"autogenerated": false,
"ratio": 3.8924558587479936,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5005514277947993,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeb'
import numpy as np
import theano
from theano.tensor.signal import downsample
from theano.tensor.nnet import conv, conv3d2d
from theano.tensor.shared_randomstreams import RandomStreams
from spynet.utils.utilities import share, get_h5file_data
from spynet.models.max_pool_3d import max_pool_3d
class LayerBlock():
    """
    Abstract class representing a function from an input space to an output
    space. It is the building block of a Layer object.
    """
    # Human-readable name, overridden by concrete subclasses.
    name = None
    def __init__(self):
        # Theano shared parameters of the block (empty for parameter-less blocks).
        self.params = []
    def forward(self, x, batch_size, run_time):
        """Return the output of the layer block
        Args:
            x (theano.tensor.TensorType): input of the layer block
            batch_size (int): size of the batch of data being processed by the network
            run_time (boolean): True when used at run time, False during
                training. This is useful for dropout.
        Returns:
            (theano.tensor.TensorType): output of the layer block
        """
        raise NotImplementedError
    def save_parameters(self, h5file, name):
        """Save all parameters of the layer block in a hdf5 file (no-op by default)."""
        pass
    def load_parameters(self, h5file, name):
        """Load all parameters of the layer block from a hdf5 file (no-op by default)."""
        pass
    def update_params(self):
        """Refresh self.params (no-op for parameter-less blocks)."""
        pass
    def __str__(self):
        msg = "[{}] \n".format(self.name)
        return msg
class LayerBlockIdentity(LayerBlock):
    """
    Block implementing the identity function: the input is passed through
    unchanged.
    """
    name = "Identity Layer block"
    def __init__(self):
        LayerBlock.__init__(self)
    def forward(self, x, batch_size, run_time):
        return x
class LayerBlockNoise(LayerBlock):
    """
    Base class for noise blocks that add a random signal on the fly.
    """
    def __init__(self):
        LayerBlock.__init__(self)
        # Fixed seed so the theano noise stream is reproducible across runs.
        numpy_rng = np.random.RandomState(123)
        self.theano_rng = RandomStreams(numpy_rng.randint(2**30))
class LayerBlockNoiseDropoutBernoulli(LayerBlockNoise):
    """
    Noise block applying Bernoulli (dropout) noise on the fly.
    """
    name = "Bernoulli Layer block"
    def __init__(self, bernoulli_p):
        LayerBlockNoise.__init__(self)
        # Probability used for the Bernoulli mask (keep probability).
        self.bernoulli_p = bernoulli_p
    def forward(self, x, batch_size, run_time):
        # Training: multiply by a freshly sampled binary mask.
        # Run time: rescale by the probability instead of sampling.
        if not run_time:
            return x * self.theano_rng.binomial(size=x.shape, n=1, p=self.bernoulli_p, dtype=theano.config.floatX)
        return x * self.bernoulli_p
class LayerBlockGaussianNoise(LayerBlockNoise):
    """
    Noise block layer that adds gaussian noise on the fly.

    The noise is drawn independently for every element of the input.
    """
    name = "Gaussian noise Layer block"

    def __init__(self, avg=0, std=0.2):
        """
        Args:
            avg (float): mean of the additive gaussian noise
                (default 0, the previously hard-coded value)
            std (float): standard deviation of the noise
                (default 0.2, the previously hard-coded value)
        """
        LayerBlockNoise.__init__(self)
        self.avg = avg
        self.std = std

    def forward(self, x, batch_size, run_time):
        # Additive gaussian noise; note it is applied at run time as well,
        # exactly as in the original hard-coded implementation.
        return x + self.theano_rng.normal(size=x.shape, avg=self.avg, std=self.std, dtype=theano.config.floatX)
class LayerBlockMultiplication(LayerBlock):
    """
    Block that multiplies the input elementwise by a vector of the same size
    """
    name = "Multiplication Layer block"

    def __init__(self, vec):
        LayerBlock.__init__(self)
        # Wrap the vector in a theano shared variable; it is NOT added to
        # self.params, so it stays constant and is never updated by training.
        self.vec = share(vec)

    def forward(self, x, batch_size, run_time):
        # Elementwise (broadcasting) product with the stored vector.
        return x * self.vec
class LayerBlockNormalization(LayerBlock):
    """
    Block that normalizes the input so it sums to one
    """
    name = "Normalization Layer block"

    def __init__(self):
        LayerBlock.__init__(self)

    def forward(self, x, batch_size, run_time):
        # NOTE(review): the sum is taken over the WHOLE tensor, i.e. across
        # the entire minibatch, not per row -- confirm this is intended when
        # x is a (batch, features) matrix.
        return x / theano.tensor.sum(x)
class LayerBlockOfNeurons(LayerBlock):
    """
    Abstract class defining a group of neurons.
    Attributes:
        name (string): Name of the layer block (used for printing or writing)
        w (theano shared numpy array): Weights of the layer block
        b (theano shared numpy array): Biases of the layer block
        params (list): [w,b]
        neuron_type (NeuronType object): defines the type of the neurons of the layer block
    """
    def __init__(self, neuron_type):
        LayerBlock.__init__(self)
        # w and b are allocated later by init_parameters().
        self.w = None
        self.b = None
        self.neuron_type = neuron_type

    def init_parameters(self, w_shape, b_shape):
        """Allocate and initialize the shared weight and bias variables."""
        # Half-width of the uniform init interval, chosen by the subclass
        # (fan-in/fan-out heuristic, see compute_bound_parameters_virtual).
        w_bound = self.compute_bound_parameters_virtual()
        # initialize weights with random weights
        self.w = share(np.asarray(
            np.random.uniform(low=-w_bound, high=w_bound, size=w_shape),
            dtype=theano.config.floatX), "w")
        # the bias is a 1D tensor -- one bias per output feature map
        b_values = 0.1 + np.zeros(b_shape, dtype=theano.config.floatX)  # Slightly positive for RELU units
        self.b = share(b_values, "b")
        self.update_params()

    def compute_bound_parameters_virtual(self):
        """Return the half-width of the uniform weight-initialization interval."""
        raise NotImplementedError

    def save_parameters(self, h5file, name):
        # Parameters are stored as float datasets "<name>/w" and "<name>/b".
        h5file.create_dataset(name + "/w", data=self.w.get_value(), dtype='f')
        h5file.create_dataset(name + "/b", data=self.b.get_value(), dtype='f')

    def load_parameters(self, h5file, name):
        self.w.set_value(get_h5file_data(h5file, name + "/w"), borrow=True)
        self.b.set_value(get_h5file_data(h5file, name + "/b"), borrow=True)

    def update_params(self):
        # Keep the optimizer-visible parameter list in sync with w and b.
        self.params = [self.w, self.b]

    def __str__(self):
        msg = "[{}] with [{}] \n".format(self.name, self.neuron_type)
        msg += self.print_virtual()
        n_parameters = 0
        for p in self.params:
            n_parameters += p.get_value().size
        msg += "Number of parameters: {} \n".format(n_parameters)
        return msg

    def print_virtual(self):
        """Extra description appended by subclasses (empty by default)."""
        return ""
class LayerBlockFullyConnected(LayerBlockOfNeurons):
    """
    Dense layer block: every input unit is connected to every neuron.
    """
    name = "Fully connected layer block"

    def __init__(self, neuron_type, n_in, n_out):
        LayerBlockOfNeurons.__init__(self, neuron_type)
        self.n_in = n_in
        self.n_out = n_out
        self.init_parameters((n_in, n_out), (n_out,))

    def compute_bound_parameters_virtual(self):
        # Glorot-style uniform initialization bound.
        fan_total = self.n_in + self.n_out
        return np.sqrt(6. / fan_total)

    def set_w(self, new_w):
        """Replace the weight matrix and refresh the recorded dimensions."""
        self.w.set_value(new_w, borrow=True)
        self.n_in, self.n_out = new_w.shape

    def forward(self, x, batch_size, run_time):
        # Affine transform followed by the neuron activation.
        pre_activation = theano.tensor.dot(x, self.w) + self.b
        return self.neuron_type.activation_function(pre_activation)

    def print_virtual(self):
        return "Number of inputs: {} \nNumber of outputs: {}\n".format(self.n_in, self.n_out)
class LayerBlockConv2DAbstract(LayerBlockOfNeurons):
    """
    Abstract class defining common components of LayerConv2D and LayerConvPool2D
    """
    def __init__(self, neuron_type, in_shape, flt_shape):
        """
        Args:
            in_shape (tuple or list of length 3):
                (num input feature maps, image height, image width)
            flt_shape (tuple or list of length 4):
                (number of filters, num input feature maps, filter height, filter width)
        """
        LayerBlockOfNeurons.__init__(self, neuron_type)
        self.in_shape = in_shape
        self.filter_shape = flt_shape
        # The filter stack must expect exactly the number of input maps provided.
        if in_shape[0] != flt_shape[1]:
            raise Exception("The number of feature maps is not consistent")
        # One bias per output filter.
        self.init_parameters(flt_shape, (flt_shape[0],))

    def forward(self, x, batch_size, run_time):
        # Inputs arrive as flattened rows; restore the 4D image batch layout.
        img_batch_shape = (batch_size,) + self.in_shape
        x = x.reshape(img_batch_shape)
        # Convolve input feature maps with filters
        conv_out = conv.conv2d(input=x,
            filters=self.w,
            image_shape=img_batch_shape,
            filter_shape=self.filter_shape)
        # Subclasses finish the pass (bias / pooling / activation).
        return self.forward_virtual(conv_out)

    def forward_virtual(self, conv_out):
        """Finish the forward pass from the raw convolution output."""
        raise NotImplementedError

    def print_virtual(self):
        return "Image shape: {}\nFilter shape: {}\n".format(self.in_shape, self.filter_shape)
class LayerBlockConv2D(LayerBlockConv2DAbstract):
    """
    Plain 2D convolutional layer block (no pooling).
    """
    name = "2D convolutional layer block"

    def __init__(self, neuron_type, in_shape, flt_shape):
        LayerBlockConv2DAbstract.__init__(self, neuron_type, in_shape, flt_shape)

    def compute_bound_parameters_virtual(self):
        # fan_in: inputs feeding one unit; fan_out: units fed per input map.
        n_in_per_unit = np.prod(self.filter_shape[1:])
        n_out_per_unit = self.filter_shape[0] * np.prod(self.filter_shape[2:])
        return np.sqrt(6. / (n_in_per_unit + n_out_per_unit))

    def forward_virtual(self, conv_out):
        # Broadcast one bias per feature map, apply the activation and
        # flatten each sample back to a row vector.
        biased = conv_out + self.b.dimshuffle('x', 0, 'x', 'x')
        return self.neuron_type.activation_function(biased).flatten(2)
class LayerBlockConvPool2D(LayerBlockConv2DAbstract):
    """
    2D convolutional layer + pooling layer. The reason for not having a separate pooling layer is that the combination
    of the two layer blocks can be optimized.
    """
    name = "2D convolutional + pooling layer"

    def __init__(self, neuron_type, in_shape, flt_shape, poolsize=(2, 2)):
        # poolsize must be set before the base __init__ runs, because it
        # triggers compute_bound_parameters_virtual() which reads it.
        self.poolsize = poolsize
        LayerBlockConv2DAbstract.__init__(self, neuron_type, in_shape, flt_shape)

    def compute_bound_parameters_virtual(self):
        fan_in = np.prod(self.filter_shape[1:])
        # Pooling divides the effective fan-out per unit.
        fan_out = (self.filter_shape[0] * np.prod(self.filter_shape[2:]) / np.prod(self.poolsize))
        return np.sqrt(6. / (fan_in + fan_out))

    def forward_virtual(self, conv_out):
        # Downsample each feature map individually, using maxpooling
        pooled_out = downsample.max_pool_2d(input=conv_out,
            ds=self.poolsize,
            ignore_border=True)
        # Bias/activation are applied after pooling -- cheaper, and equivalent
        # for monotonically increasing activations such as ReLU.
        return self.neuron_type.activation_function(pooled_out + self.b.dimshuffle('x', 0, 'x', 'x')).flatten(2)

    def print_virtual(self):
        return LayerBlockConv2DAbstract.print_virtual(self) + "Pool size: {}\n".format(self.poolsize)
class LayerBlockConvPool3D(LayerBlockOfNeurons):
    """
    3D convolutional layer block + pooling layer block
    """
    name = "3D convolutional + pooling layer block"

    def __init__(self, neuron_type, in_channels, in_shape, flt_channels, flt_shape, poolsize):
        """
        Args:
            in_channels (int): number of input channels
            in_shape (tuple of length 3): shape of the input (in_width, in_height, in_depth)
            flt_channels (int): number of filters
            flt_shape (tuple of length 3): shape of the filters
                (flt_width, flt_height, flt_depth) -- note: an earlier
                docstring claimed depth-first, but the unpacking below is
                width-first
            poolsize (tuple of length 3): window of the pooling operation
        """
        LayerBlockOfNeurons.__init__(self, neuron_type)
        in_width, in_height, in_depth = self.in_shape = in_shape
        flt_width, flt_height, flt_depth = self.flt_shape = flt_shape
        self.in_channels = in_channels
        self.flt_channels = flt_channels
        # conv3d2d.conv3d expects signals as (depth, channels, height, width).
        self.image_shape = (in_depth, in_channels, in_height, in_width)
        self.filter_shape = (flt_channels, flt_depth, in_channels, flt_height, flt_width)
        self.poolsize = poolsize
        self.init_parameters(self.filter_shape, (self.filter_shape[0],))

    def compute_bound_parameters_virtual(self):
        fan_in = np.prod(self.in_shape)
        # Pooling divides the effective fan-out per unit.
        fan_out = self.flt_channels * np.prod(self.flt_shape) / np.prod(self.poolsize)
        return np.sqrt(6. / (fan_in + fan_out))

    def forward(self, x, batch_size, run_time):
        # Restore the 5D signal batch layout from flattened rows.
        img_batch_shape = (batch_size,) + self.image_shape
        x = x.reshape(img_batch_shape)
        # Convolve input feature maps with filters
        conv_out = conv3d2d.conv3d(signals=x,
            filters=self.w,
            signals_shape=img_batch_shape,
            filters_shape=self.filter_shape,
            border_mode='valid')
        perm = [0, 2, 1, 3, 4]  # Permutation is needed due to the pooling function prototype
        pooled_out = max_pool_3d(conv_out.dimshuffle(perm), self.poolsize, ignore_border=True)
        # Undo the permutation, add the per-filter bias, activate and flatten.
        return self.neuron_type.activation_function(pooled_out.dimshuffle(perm)
            + self.b.dimshuffle('x', 'x', 0, 'x', 'x')).flatten(2)

    def print_virtual(self):
        return "Image shape: {} \n Filter shape: {} \n Pool size: {} \n".format(
            self.image_shape, self.filter_shape, self.poolsize) | {
"repo_name": "adbrebs/spynet",
"path": "models/layer_block.py",
"copies": "1",
"size": "12434",
"license": "bsd-2-clause",
"hash": 3850670920298809000,
"line_mean": 32.6081081081,
"line_max": 118,
"alpha_frac": 0.6126749236,
"autogenerated": false,
"ratio": 3.6710953646294655,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9779574981776469,
"avg_score": 0.0008390612905992823,
"num_lines": 370
} |
__author__ = 'adeksandrcernov'
# -*- coding: utf-8 -*-
from model.group import Group
from random import randrange
def test_edit_group_name(app, db):
    # Guarantee there is at least one group to edit.
    if app.group.count() == 0:
        app.group.create(Group(name="test"))
    old_groups = db.get_group_list()
    index = randrange(len(old_groups))
    updated = Group(name="Updated group")
    updated.id = old_groups[index].id
    app.group.edit_group_by_index(index, updated)
    new_groups = db.get_group_list()
    assert len(old_groups) == len(new_groups)
    # The edited group should be the only difference between the two lists.
    old_groups[index] = updated
    assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
#def test_edit_group_header(app):
# old_groups = app.group.get_group_list()
# group = Group(name = "Updated header")
# if app.group.count() == 0:
# app.group.create(Group(name="test"))
# app.group.edit_group(group)
# new_groups = app.group.get_group_list()
# assert len(old_groups) == len(new_groups)
# old_groups[0] = group
# assert sorted(old_groups, key=Group.id_or_max) == sorted (new_groups,key=Group.id_or_max) | {
"repo_name": "AlChernoff/python_training",
"path": "test/test_edit_group.py",
"copies": "1",
"size": "1172",
"license": "apache-2.0",
"hash": 6183812898558406000,
"line_mean": 39.4482758621,
"line_max": 98,
"alpha_frac": 0.6015358362,
"autogenerated": false,
"ratio": 3.1005291005291005,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9158461386905397,
"avg_score": 0.008720709964740718,
"num_lines": 29
} |
__author__ = 'adeksandrcernov'
from model.contact import Contact
from random import randrange
def test_edit_contact_lastname(app, db):
    # Make sure at least one contact exists before editing.
    if app.contacts.count() == 0:
        app.contacts.add_new_contact(Contact(firstname="test", address="test"))
    old_contacts = db.get_contacts_list()
    index = randrange(len(old_contacts))
    updated = Contact(firstname="Newname")
    updated.id = old_contacts[index].id
    app.contacts.edit_contact_by_index(index, updated)
    new_contacts = db.get_contacts_list()
    assert len(old_contacts) == len(new_contacts)
    # The edited contact is the only expected difference.
    old_contacts[index] = updated
    assert sorted(old_contacts, key=Contact.id_or_max) == sorted(new_contacts, key=Contact.id_or_max)
#def test_edit_contact_address(app):
# if app.contacts.count() == 0:
# app.contacts.add_new_contact(Contact(firstname="test", address="test"))
# old_contacts = app.contacts.get_contacts_list()
# contact = Contact(address="New address")
# contact.id = old_contacts[0].id
# app.contacts.edit_first_contact(contact)
# new_contacts = app.contacts.get_contacts_list()
# assert len(old_contacts) == len(new_contacts)
# old_contacts[0] = contact
# assert sorted(old_contacts, key=Contact.id_or_max) == sorted (new_contacts,key=Contact.id_or_max) | {
"repo_name": "AlChernoff/python_training",
"path": "test/test_edit_contact.py",
"copies": "1",
"size": "1364",
"license": "apache-2.0",
"hash": 1638336901595803100,
"line_mean": 46.0689655172,
"line_max": 106,
"alpha_frac": 0.6480938416,
"autogenerated": false,
"ratio": 3.418546365914787,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4566640207514787,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeksandrcernov'
from pony.orm import *
from datetime import *
from model.group import Group
from model.contact import Contact
from pymysql.converters import decoders
class ORMfixture:
    """pony-ORM backed, read-only access to the addressbook database."""
    db = Database()

    class ORMGroup(db.Entity):
        # Maps the `group_list` table onto the test Group model fields.
        _table_ = 'group_list'
        id = PrimaryKey(int, column="group_id")
        name = Optional(str, column="group_name")
        header = Optional(str, column="group_header")
        footer = Optional(str, column="group_footer")

    class ORMContact(db.Entity):
        # Maps the `addressbook` table; only a few columns are declared here.
        _table_ = "addressbook"
        id = PrimaryKey(int, column="id")
        firstname = Optional(str, column="firstname")
        lastname = Optional(str, column="lastname")
        # Note: the database column really is spelled 'depricated'.
        deprecated = Optional(datetime, column = 'depricated')

    def __init__(self, host, name , user, password):
        self.db.bind('mysql', host = host, database = name, user = user, password = password, conv = decoders)
        self.db.generate_mapping()
        # Echo the generated SQL for debugging.
        sql_debug(True)

    def convert_groups_to_model(self,groups):
        """Convert ORMGroup entities into plain Group model objects."""
        def convert(group):
            return Group(id = str(group.id), name = group.name, header=group.header, footer = group.footer)
        return list(map(convert, groups))

    def convert_contacts_to_model(self,contacts):
        """Convert ORMContact entities into plain Contact model objects.

        NOTE(review): this reads many attributes (middlename, nickname,
        company, phones, emails, ...) that ORMContact does not declare
        above -- confirm those columns are actually mapped, otherwise this
        raises AttributeError at runtime.
        """
        def convert(contact):
            return Contact(id = str(contact.id), firstname= contact.firstname, middlename=contact.middlename, lastname=contact.lastname,
                nickname = contact.nickname, title = contact.title,
                company = contact.company,
                address = contact.address,home_phone = contact.home_phone, mobile_phone = contact.mobile_phone,work_phone = contact.work_phone, fax = contact.fax,byear = contact.byear,
                address1 = contact.address1,phone2 = contact.phone2,email = contact.email,email2 = contact.email2, email3 = contact.email3)
        return list(map(convert, contacts))

    @db_session
    def get_group_list(self):
        """Return every group from the database as Group models."""
        return self.convert_groups_to_model(select(g for g in ORMfixture.ORMGroup))

    @db_session
    def get_contact_list(self):
        """Return every non-deprecated contact as Contact models."""
        return self.convert_contacts_to_model(select(c for c in ORMfixture.ORMContact if c.deprecated is None)) | {
"repo_name": "AlChernoff/python_training",
"path": "fixture/orm.py",
"copies": "1",
"size": "2232",
"license": "apache-2.0",
"hash": -569014513225881660,
"line_mean": 40.3518518519,
"line_max": 179,
"alpha_frac": 0.6527777778,
"autogenerated": false,
"ratio": 3.8219178082191783,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4974695586019178,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeksandrcernov'
import mysql.connector
from model.contact import Contact
from model.group import Group
class DbFixture:
def __init__(self, host, name , user, password):
self.host = host
self.name = name
self.user = user
self.password = password
self.connection = mysql.connector.connect(host = host, database = name, user = user, password = password)
self.connection.autocommit = True
def get_contacts_list(self):
list=[]
cursor = self.connection.cursor()
try:
cursor.execute("select firstname, middlename,lastname,nickname,"
"title,address,home,mobile,work,fax"
"email,email2,email3,byear,address2,phone2 from addressbook where deprecated = '0000-00-00 00:00:00'")
for row in cursor:
(firstname,middlename,lastname,nickname,title,company,address,home_phone,mobile_phone,work_phone,fax,byear,address1,phone2,email,
email2,email3) = row
list.append(Contact(firstname= firstname, middlename=middlename, lastname=lastname,nickname = nickname, title = title,
company = company,
address = address,home_phone = home_phone, mobile_phone = mobile_phone,work_phone = work_phone, fax = fax,byear = byear,
address1 = address1,phone2 = phone2,email = email,email2 = email2, email3 = email3))
finally:
cursor.close()
return list
def get_group_list(self):
list=[]
cursor = self.connection.cursor()
try:
cursor.execute("select group_id, group_name,group_header,group_footer from group_list")
for row in cursor:
(id,name,header,footer) = row
list.append(Group(id = str(id), name = name, header=header, footer = footer))
finally:
cursor.close()
return list
def destroy(self):
self.connection.close() | {
"repo_name": "AlChernoff/python_training",
"path": "fixture/Db.py",
"copies": "1",
"size": "2024",
"license": "apache-2.0",
"hash": 3252306621075301000,
"line_mean": 38.7058823529,
"line_max": 145,
"alpha_frac": 0.6032608696,
"autogenerated": false,
"ratio": 4.06425702811245,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5167517897712449,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeksandrcernov'
import re
from random import randrange
from test import test_phones
def test_names_on_home_page(app):
    """The names shown on the home page must equal the cleaned-up names
    stored on the contact's edit page."""
    old_contacts = app.contacts.get_contacts_list()
    # Compare a randomly chosen contact.
    index = randrange (len(old_contacts))
    name_from_home_page = app.contacts.get_contacts_list()[index]
    name_from_edit_page = app.contacts.get_contact_info_from_edit_page(index)
    assert name_from_home_page.firstname == merge_firstname_like_on_home_page(name_from_edit_page)
    assert name_from_home_page.lastname == merge_lastname_like_on_home_page(name_from_edit_page)
def clear(s):
    # Strip the formatting characters the home page drops.
    return re.sub("[() -+]", "", s)


def merge_firstname_like_on_home_page(contact):
    # Mimic the home-page rendering of the first name: skip missing values,
    # strip formatting characters, drop empty results, join with newlines.
    values = [contact.firstname]
    cleaned = (clear(v) for v in values if v is not None)
    return "\n".join(v for v in cleaned if v != "")
def merge_lastname_like_on_home_page(contact):
    # Same home-page rendering rule, applied to the last name: skip None,
    # strip formatting characters via clear(), drop empty results.
    return "\n".join(filter(lambda x: x != "",
        map(lambda x: clear(x),
        filter(lambda x: x is not None,
        [contact.lastname])))) | {
"repo_name": "AlChernoff/python_training",
"path": "test/test_names.py",
"copies": "1",
"size": "1169",
"license": "apache-2.0",
"hash": 4322906921077607400,
"line_mean": 40.7857142857,
"line_max": 98,
"alpha_frac": 0.5782720274,
"autogenerated": false,
"ratio": 3.585889570552147,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4664161597952147,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeksandrcernov'
import re
from random import randrange
def test_phones_on_home_page(app):
    """The concatenated phone column on the home page must equal the
    cleaned-up phones stored on the contact's edit page."""
    old_contacts = app.contacts.get_contacts_list()
    # Compare a randomly chosen contact.
    index = randrange (len(old_contacts))
    contact_from_home_page = app.contacts.get_contacts_list()[index]
    contact_from_edit_page = app.contacts.get_contact_info_from_edit_page(index)
    assert contact_from_home_page.all_phones_from_home_page == merge_phones_like_on_home_page(contact_from_edit_page)
def test_phones_on_contact_view_page(app):
    """Each phone shown on the contact view page must match the edit page."""
    old_contacts = app.contacts.get_contacts_list()
    # Compare a randomly chosen contact.
    index = randrange (len(old_contacts))
    contact_from_view_page = app.contacts.get_contacts_from_view_page(index)
    contact_from_edit_page = app.contacts.get_contact_info_from_edit_page(index)
    assert contact_from_view_page.home_phone == contact_from_edit_page.home_phone
    assert contact_from_view_page.work_phone == contact_from_edit_page.work_phone
    assert contact_from_view_page.mobile_phone == contact_from_edit_page.mobile_phone
    assert contact_from_view_page.phone2 == contact_from_edit_page.phone2
def clear(s):
    # Remove phone formatting characters: parentheses, spaces and dashes.
    return re.sub("[() -]", "", s)


def merge_phones_like_on_home_page(contact):
    # The home page shows all phone numbers joined by newlines, with
    # formatting characters stripped and missing/empty numbers skipped.
    raw = [contact.home_phone, contact.mobile_phone, contact.work_phone, contact.phone2]
    cleaned = (clear(p) for p in raw if p is not None)
    return "\n".join(p for p in cleaned if p != "")
| {
"repo_name": "AlChernoff/python_training",
"path": "test/test_phones.py",
"copies": "1",
"size": "1470",
"license": "apache-2.0",
"hash": -6782900777250492000,
"line_mean": 44.9375,
"line_max": 118,
"alpha_frac": 0.674829932,
"autogenerated": false,
"ratio": 3.259423503325942,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4434253435325942,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adeksandrcernov'
from model.group import Group
import random
import string
import os.path
import jsonpickle
import getopt
import sys
try:
    # -n: number of groups to generate, -f: output file.
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number od groups", "file"])
except getopt.GetoptError as err:
    # BUG FIX: the original called getopt.usage(), which does not exist in
    # the getopt module and raised AttributeError instead of reporting the
    # real problem. Print the parse error and a usage hint instead.
    print(err)
    print("Usage: group.py [-n number_of_groups] [-f output_file]")
    sys.exit(2)
n = 5  # default number of random groups
f = "data/groups.json"  # default output file (relative to project root)
for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a
def random_string(prefix, maxlen):
    # Alphabet is biased towards spaces (weight 10) to exercise trimming.
    alphabet = string.ascii_letters + string.digits + string.punctuation + " " * 10
    length = random.randrange(maxlen)
    return prefix + "".join(random.choice(alphabet) for _ in range(length))
# One deliberately empty group plus n randomly generated ones.
testdata = [Group(name= "", header="", footer="")] + [
    Group(name= random_string("name",10), header = random_string("header",20),footer = random_string("footer",20))
    for i in range(n)
]
# Resolve the output path relative to the project root (parent of this script's directory).
file = os.path.join(os.path.dirname(os.path.abspath(__file__)),"..", f)
with open(file, "w") as out:
    jsonpickle.set_encoder_options("json",indent = 2)
    out.write(jsonpickle.encode(testdata)) | {
"repo_name": "AlChernoff/python_training",
"path": "generator/group.py",
"copies": "1",
"size": "1042",
"license": "apache-2.0",
"hash": -7180291143897491000,
"line_mean": 23.8333333333,
"line_max": 114,
"alpha_frac": 0.6381957774,
"autogenerated": false,
"ratio": 3.1480362537764353,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9240414196506979,
"avg_score": 0.009163566933891394,
"num_lines": 42
} |
import json
from bs4 import BeautifulSoup
import urllib.request
import os,sys
import time
import re
import signal
from contextlib import contextmanager
class TimeoutException(Exception):
    """Raised when a guarded section exceeds its time budget."""
    pass


@contextmanager
def time_limit(seconds):
    """Run the enclosed block, raising TimeoutException after `seconds`."""
    def on_alarm(signum, frame):
        raise TimeoutException
    signal.signal(signal.SIGALRM, on_alarm)
    signal.alarm(seconds)
    try:
        yield
    finally:
        # Always cancel the pending alarm, even if the block raised.
        signal.alarm(0)
# The prefix of updated link to get JSON object inserted in the website using AngularJS
JSON_Data_link = "http://ieeexplore.ieee.org/rest/document/"
# Checking existence of the required directory
def ckdir(dir):
    """Ensure that directory `dir` exists, creating parents as needed."""
    if os.path.exists(dir):
        return
    os.makedirs(dir)
def get_response(aurl):
    """Open `aurl` with a browser User-Agent, retrying until it succeeds.

    NOTE(review): the retry loop is unbounded -- a permanently dead URL
    makes this spin forever, printing an error on each attempt.
    """
    hdr = {'User-Agent':'Mozilla/5.0'}
    req = urllib.request.Request(aurl,headers=hdr)
    # response = urllib.request.urlopen(req)
    while True:
        try:
            # Abort a single attempt after 30 seconds via SIGALRM.
            with time_limit(30):
                response = urllib.request.urlopen(req)
            break
        except Exception:
            print("Error opening url!!")
            continue
    return response
# Procedure to return a parseable BeautifulSoup object of a given url
def get_soup(aurl):
    """Fetch `aurl` and return it parsed with BeautifulSoup's html.parser."""
    print(aurl)  # progress logging
    response = get_response(aurl)
    soup = BeautifulSoup(response,'html.parser')
    return soup
# The four section fetchers below were copy-pasted variants of the same
# request/decode logic; they now share one private helper.
def _get_document_section(arnumber, section):
    """Fetch and JSON-decode one section of an article; {} on any failure."""
    data = {}
    try:
        link = JSON_Data_link + arnumber + "/" + section
        response = get_response(link)
        data = json.loads(response.read().decode())
    except Exception:
        # Best effort: any network/decode error yields an empty dict,
        # preserving the original silent-fallback behaviour.
        pass
    return data


# Getting the major details provided in the abstract tab of an article
def get_details(arnumber):
    return _get_document_section(arnumber, "abstract")


# Getting the authors' details
def get_authors(arnumber):
    return _get_document_section(arnumber, "authors")


# Getting the references JSON structure provided under the References tab of an article
def get_references(arnumber):
    return _get_document_section(arnumber, "references")


# Getting the citations JSON structure provided under the References tab of an article
def get_citations(arnumber):
    return _get_document_section(arnumber, "citations")
# Returning the article dictionary consisting of all the required details
def get_article(arnumber):
    """Assemble one article record: details, authors, references, citations."""
    article = {"arnumber": arnumber}
    article["details"] = get_details(arnumber)
    article["authors"] = get_authors(arnumber)
    reference_data = get_references(arnumber)
    # The references fetch may have failed and returned {}; fall back to [].
    article["references"] = reference_data.get('references', [])
    article["citations"] = get_citations(arnumber)
    return article
# Article numbers of each article in the issue link
def get_articles(aurl, adir):
    """Scrape every article of the issue page `aurl` into `adir`.

    Each article is written to `<adir>/Article <k>/ArticleData.json`.
    """
    soup = get_soup(aurl)
    link1 = soup.find('input', {'id': 'oqs'})
    link0 = soup.find('input', {'id': 'submitUrl'})
    try:
        total_number = soup.find('div', {'class': 'results-display'}).find_all('b')[1].get_text()
    except IndexError:
        total_number = "10"  # fallback page size when the count is missing
    # Re-request the issue with all results on a single page.
    newurl = 'http://ieeexplore.ieee.org' + link0['value'] + link1['value'] + '&rowsPerPage=' + total_number
    fsoup = get_soup(newurl)
    try:
        articles = fsoup.find('ul', {'class': 'results'}).find_all('li')
    except AttributeError:
        # BUG FIX: the original had `continue` here, which is a SyntaxError
        # outside a loop; with no result list there is nothing to scrape.
        return
    count = 0
    count_article = 0
    for article in articles:
        print('Accessing article index ' + str(count_article))
        count_article += 1
        try:
            if article.find('h3').find('a') != None:
                count += 1
                article_dir = adir + '/Article ' + str(count)
                ckdir(article_dir)
                article_no = article.find('span').find('input')['id']
                localtime = time.asctime(time.localtime(time.time()))
                print('Access time is : ' + str(localtime))
                print(article_no)
                article = get_article(article_no)
                with open(article_dir + '/ArticleData.json', 'w') as outfile:
                    json.dump(article, outfile)
        except AttributeError:
            # Entries without the expected markup are skipped silently.
            pass
# Creates directories for all the volumes,all the issues of a volume
def get_issues(aurl,adir):
    """Scrape every volume/issue of the journal page `aurl` into `adir`.

    Uses the module-level globals `i` (volume resume index) and `j`
    (issue resume index) so an interrupted run can be restarted.
    """
    global i
    global j
    soup = get_soup(aurl)
    volumes_dir = adir + '/Volumes'
    ckdir(volumes_dir)
    # Journal-level metrics, written once per journal.
    METRICS_dict = {}
    metrics = soup.find('div',{'class':'jrnl-metrics cf'}).find_all('span')
    if metrics == []:
        # No metrics panel on the page: record zeros.
        METRICS_dict['Imfact Factor'] = '0'
        METRICS_dict['Eigenfactor'] = '0'
        METRICS_dict['Article Influence Score'] = '0'
    else:
        # The metrics panel interleaves labels and values; values sit at
        # even span indices (0, 2, 4).
        try:
            METRICS_dict['Imfact Factor'] = str(metrics[0].get_text())
        except IndexError:
            METRICS_dict['Imfact Factor'] = '0'
        try:
            METRICS_dict['Eigenfactor'] = str(metrics[2].get_text())
        except IndexError:
            METRICS_dict['Eigenfactor'] = '0'
        try:
            METRICS_dict['Article Influence Score'] = str(metrics[4].get_text())
        except IndexError:
            METRICS_dict['Article Influence Score'] = '0'
    with open(adir+'/metrics.json','w') as outfile:
        json.dump(METRICS_dict,outfile)
    try:
        # Regular journals list their volumes grouped by year.
        volumes = soup.find("div",{"class":'volumes'})
        years = volumes.find_all('ul')
    except AttributeError:
        # Alternate page layout ("past-issues" / open-access): one issue per
        # volume; handle it here and return early.
        volumes = soup.find("div",{'id':'past-issues'}).find('div',{'class':'oa_years'})
        for volume in volumes.find_all('li')[i::]:
            volume_dir = volumes_dir + '/' + str(volume.get_text())
            ckdir(volume_dir)
            issue_dir = volume_dir + '/Issue 1'
            ckdir(issue_dir)
            issue_url = 'http://ieeexplore.ieee.org' + str(volume.find('a')['href'])
            get_articles(issue_url,issue_dir)
        return
    count_volume = i
    for year in years[i:]:
        count_volume += 1
        print('Accessing Volume '+str(count_volume))
        issues = year.find_all('a')
        volume_dir = volumes_dir + '/' + str(year['id'].split('-')[1])
        ckdir(volume_dir)
        print('Volume dir : '+str(volume_dir))
        count_issue = j
        for issue in issues[j:]:
            count_issue += 1
            print("Accessing issue "+str(count_issue))
            try:
                issue_no = re.findall(r'Issue: [0-9]+',str(issue.get_text()))[0]
            except IndexError:
                # Some issues are lettered, e.g. "Issue: S1".
                issue_no = re.findall(r'Issue: [A-Z][0-9]+',str(issue.get_text()))[0]
            print("Issue Name : "+str(issue_no))
            # Strip characters that are not filesystem friendly.
            issue_no = re.sub('[^a-zA-Z0-9 ]','',issue_no)
            issue_dir = volume_dir + '/' + issue_no
            ckdir(issue_dir)
            issue_url = 'http://ieeexplore.ieee.org' + str(issue['href'])
            # print('Issue Url : ' + issue_url)
            get_articles(issue_url,issue_dir)
        # Only skip issues on the first resumed volume.
        j = 0
if __name__ == "__main__":
    # Usage: scrape_v2.py <start_journal> <end_journal> [<start_volume> [<start_issue>]]
    # All indices are 1-based on the command line and converted to 0-based here.
    if len(sys.argv) != 3 and len(sys.argv) != 4 and len(sys.argv) != 5:
        print('Illegal number of arguments!! Exiting...')
        sys.exit(1)
    x = int(sys.argv[1]) - 1  # first journal index (inclusive)
    y = int(sys.argv[2]) - 1  # last journal index (exclusive in the slice below)
    i = 0  # volume resume index, consumed by get_issues()
    j = 0  # issue resume index, consumed by get_issues()
    if len(sys.argv) == 4:
        i = int(sys.argv[3]) - 1
        if i < 0 :
            print('Invalid volume number...!! Exiting...')
            sys.exit()
    if len(sys.argv) == 5:
        i = int(sys.argv[3]) - 1
        if i < 0 :
            print('Invalid volume number...!! Exiting...')
            sys.exit()
        j = int(sys.argv[4]) - 1
        if j < 0 :
            print('Invalid volume number...!! Exiting...')
            sys.exit()
    if x>=y :
        print('Invalid journal indices..!! Exiting...')
        sys.exit()
    print('Accessing journals from '+ str(x)+' to ' + str(y))
    # loading journal links from data file
    with open('../data/Journal_data.json','r') as infile:
        Journals_data = json.load(infile)
    base_dir = '../output/Journal Data'
    ckdir(base_dir)
    count_journal = x
    for record in Journals_data['records'][x:y:]:
        count_journal += 1
        # Records flagged 'vj' are skipped -- presumably virtual journals;
        # confirm against the data file's schema.
        if record['vj'] != True:
            print('Accessing journal '+str(count_journal))
            print(record['title'])
            journal_dir = base_dir + '/'+record['title']
            ckdir(journal_dir)
            full_url = 'http://ieeexplore.ieee.org' + str(record['publicationLink'])
            get_issues(full_url,journal_dir)
        # After the first journal, restart from the first volume.
        i = 0 | {
"repo_name": "vintageplayer/Scrape_Scientific_Journal_Data",
"path": "IEEE/src/scrape_v2.py",
"copies": "1",
"size": "8711",
"license": "mit",
"hash": 6597323317522951000,
"line_mean": 28.1371237458,
"line_max": 114,
"alpha_frac": 0.613247618,
"autogenerated": false,
"ratio": 3.1222222222222222,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9017865587505073,
"avg_score": 0.04352085054342989,
"num_lines": 299
} |
import json
from bs4 import BeautifulSoup
import urllib.request
import signal
from contextlib import contextmanager
class TimeoutException(Exception):
    """Raised when a guarded section exceeds its time budget."""
    pass


@contextmanager
def time_limit(seconds):
    """Run the enclosed block, raising TimeoutException after `seconds`."""
    def on_alarm(signum, frame):
        raise TimeoutException
    signal.signal(signal.SIGALRM, on_alarm)
    signal.alarm(seconds)
    try:
        yield
    finally:
        # Always cancel the pending alarm, even if the block raised.
        signal.alarm(0)
# The prefix of updated link to get JSON object inserted in the website using AngularJS
JSON_Data_link = "http://ieeexplore.ieee.org/rest/document/"
def get_response(aurl):
    """Open `aurl` with a browser User-Agent, retrying until it succeeds.

    NOTE(review): the retry loop is unbounded -- a permanently dead URL
    makes this spin forever, printing an error on each attempt.
    """
    hdr = {'User-Agent':'Mozilla/5.0'}
    req = urllib.request.Request(aurl,headers=hdr)
    # response = urllib.request.urlopen(req)
    while True:
        try:
            # Abort a single attempt after 30 seconds via SIGALRM.
            with time_limit(30):
                response = urllib.request.urlopen(req)
            break
        except Exception:
            print("Error opening url!!")
            continue
    return response
# Procedure to return a parseable BeautifulSoup object of a given url
def get_soup(aurl):
    """Download `aurl` and parse it with the builtin html.parser."""
    return BeautifulSoup(get_response(aurl), 'html.parser')
# The three section fetchers below were copy-pasted variants of the same
# request/decode logic; they now share one private helper.
def _get_document_section(arnumber, section):
    """Fetch and JSON-decode one section of an article; {} on any failure."""
    data = {}
    try:
        link = JSON_Data_link + arnumber + "/" + section
        response = get_response(link)
        data = json.loads(response.read().decode())
    except Exception:
        # Best effort: any network/decode error yields an empty dict,
        # preserving the original silent-fallback behaviour.
        pass
    return data


# Getting the major details provided in the abstract tab of an article
def get_details(arnumber):
    return _get_document_section(arnumber, "abstract")


# Getting the references JSON structure provided under the References tab of an article
def get_references(arnumber):
    return _get_document_section(arnumber, "references")


# Getting the citations JSON structure provided under the References tab of an article
def get_citations(arnumber):
    return _get_document_section(arnumber, "citations")
# Returning the article dictionary consisting of all the required details
def get_article(arnumber):
    """Assemble one article record: details, references and citations."""
    article = {}
    article["arnumber"] = arnumber
    article["details"] = get_details(arnumber)
    # BUG FIX: get_references() returns {} when the fetch fails, so indexing
    # ['references'] unconditionally raised KeyError; default to [] instead
    # (matching the companion scraper script's guarded version).
    article["references"] = get_references(arnumber).get('references', [])
    article["citations"] = get_citations(arnumber)
    return article
def get_nested_references(article_dict, n):
    """Recursively fetch up to *n* levels of referenced articles.

    Each resolvable referred article is appended to
    article_dict['referenced_articles']; entries without the expected link
    fields are skipped silently.
    """
    article_dict['referenced_articles'] = []
    for reference in article_dict['references']:
        try:
            # Heuristic from the IEEE payload: entries with >= 5 link fields
            # carry a 'documentLink' we can follow -- TODO confirm.
            if len(reference['links']) >= 5:
                print('Getting next referred article. n='+str(n))
                reference_link = reference['links']['documentLink']
                reference_number = reference_link.split('/').pop()
                referred_article = get_article(reference_number)
                if n > 1:
                    get_nested_references(referred_article, (n - 1))
                # BUG FIX: append only inside the if-branch. Previously the
                # append sat one level out, so a non-matching reference
                # re-appended the previous article (or raised NameError on
                # the very first iteration).
                article_dict['referenced_articles'].append(referred_article)
        except KeyError:
            pass
if __name__ == '__main__':
    # arnumber = "1104315"
    arnumbers = ["4399115", "4069507", "752023"]
    # BUG FIX: the old counter was incremented *before* first use, so the
    # log and output filenames started at 2; enumerate from 1 instead.
    for count, arnumber in enumerate(arnumbers, start=1):
        print('Getting main article. Article count : '+str(count))
        article_dict = get_article(arnumber)
        # Fetch three levels of referenced articles; comment this call out if
        # only the top-level article structure is required.
        get_nested_references(article_dict, 3)
        # Persist one JSON file per seed article.
        with open('./mitchell'+str(count)+'.json', 'w') as outfile:
            json.dump(article_dict, outfile)
"repo_name": "vintageplayer/Scrape_Scientific_Journal_Data",
"path": "IEEE/src/scrape_nested.py",
"copies": "1",
"size": "5375",
"license": "mit",
"hash": -6904235561784284000,
"line_mean": 28.0594594595,
"line_max": 103,
"alpha_frac": 0.6898604651,
"autogenerated": false,
"ratio": 3.2674772036474162,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44573376687474164,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adityabhasin'
import random
from faker import Faker
from faker.providers import BaseProvider
class TechDataProvider(BaseProvider):
    """Custom Faker provider that renders one JSON-like "tech item" per call.

    Faker resolves each ``{{token}}`` in the generated template against the
    formatter methods defined below.
    """
    # Template prefix shared by every item; "parameters" is completed by
    # generate_pattern() with a random selection of property templates.
    base = '"name" : "{{att_name}}", "category" : "{{ cat_name }}", "parameters" : '
    # Bunch of patterns for the item properties. These are randomly chosen per item.
    property_patterns = ['{"param_name" : "{{param_name_number}}" , "param_value" : "{{param_value_number}}"}',
                         '{"param_name" : "{{param_name_flag}}" , "param_value" : "{{param_value_flag}}"}',
                         '{"param_name" : "{{param_name_simple}}" , "param_value" : "{{param_value_simple}}"}']
    # Attribute/Item names.
    att_names = ["cassandra", "mongo", "elasticsearch", "lucene", "hibernate", "gigaspace", "storm", "oracle", "python",
                 "java", "ruby", "spring", "scala", "mysql", "postgresql", "couchbase", "hadoop", "ELK", "splunk"]
    # A set of properties for techs that have number based values. Values are a random generated number
    param_name_numbers = ["write speed", "read speed", "released"]
    # A set of properties for techs that have flag based values
    param_name_flags = ["distributed", "open source", "active dev", "support community"]
    param_value_flags = ["True", "False"]
    # A set of properties for tech that are strings. The _value_simples are the set of values that can be chosen
    param_name_simples = ["type", ]
    # Values that can be permuted against the param_name_simples
    param_value_simples = ["db", "search", "programming language", "cloud infra", "in-memory solution", "log analysis"]
    cat_names = ["cat1", "cat2", "cat3"]
    def tech_data(self):
        """Generate one sample item: build a random template, then let Faker
        substitute every {{token}} via the methods below."""
        current = self.generate_pattern()
        return self.generator.parse(current)
    # NOTE(review): the methods below call random_element through the class
    # object rather than an instance -- confirm this works with the installed
    # faker version, where random_element is normally an instance method.
    @classmethod
    def att_name(cls):
        """Pick a random technology name."""
        return cls.random_element(cls.att_names)
    @classmethod
    def att_desc(cls):
        # NOTE(review): draws from att_names, so a "description" is just
        # another random name -- confirm this is intended.
        return cls.random_element(cls.att_names)
    @classmethod
    def param_name_simple(cls):
        """Pick a random string-valued property name."""
        return cls.random_element(cls.param_name_simples)
    @classmethod
    def param_value_simple(cls):
        """Pick a random value for a string-valued property."""
        return cls.random_element(cls.param_value_simples)
    @classmethod
    def param_name_number(cls):
        """Pick a random number-valued property name."""
        return cls.random_element(cls.param_name_numbers)
    @classmethod
    def param_value_number(cls):
        # range for any number properties from which a random number will be created
        # the parser expects all values to be strings, so wrapping around str
        return str(random.randint(500, 5000))
    @classmethod
    def param_name_flag(cls):
        """Pick a random flag-valued property name."""
        return cls.random_element(cls.param_name_flags)
    @classmethod
    def param_value_flag(cls):
        """Pick "True" or "False" (as strings, per the template format)."""
        return cls.random_element(cls.param_value_flags)
    @classmethod
    def cat_name(cls):
        """Pick a random category label."""
        return cls.random_element(cls.cat_names)
    # For each item, generate a template by picking a random number of property
    # patterns. The generator then parses the template and substitutes the
    # random values from the lists above to create sample data.
    def generate_pattern(self):
        props = []
        for i in range(random.randint(10,15)):
            props.append(self.random_element(self.property_patterns))
        return "{" + self.base + "[" + ",".join(props) + "]}"
if __name__ == "__main__":
    fake = Faker()
    fake.add_provider(TechDataProvider)
    # fake.seed(1234)
    # Emit 30 sample items; change the range bound for more or fewer.
    for _ in range(30):
        print(fake.tech_data())
| {
"repo_name": "conlini/h2h",
"path": "tech/utilities/_SampleGenerator.py",
"copies": "1",
"size": "3494",
"license": "mit",
"hash": -7744425610355364000,
"line_mean": 35.0206185567,
"line_max": 120,
"alpha_frac": 0.6390955924,
"autogenerated": false,
"ratio": 3.736898395721925,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9860002776520866,
"avg_score": 0.003198242320211755,
"num_lines": 97
} |
###AUTHOR: ADITYA D PAI
###------SCRIPT FOR GENERATING IDEAL INVESTMENTS-------
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, preprocessing
import pandas as pd
from matplotlib import style
import statistics
from collections import Counter
import warnings
from sklearn.ensemble import RandomForestClassifier
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=UserWarning)
# Initialize the list of feature columns. These names must match the column
# headers of the key-stats CSV files loaded in buildDataSet()/analysis().
features = ['DE Ratio', 'Trailing P/E', 'Price/Sales',
            'Price/Book', 'Profit Margin', 'Operating Margin',
            'Return on Assets', 'Return on Equity', 'Revenue Per Share',
            'Market Cap', 'Enterprise Value', 'Forward P/E',
            'PEG Ratio', 'Enterprise Value/Revenue', 'Enterprise Value/EBITDA',
            'Revenue', 'Gross Profit', 'EBITDA',
            'Net Income Avl to Common ', 'Diluted EPS', 'Earnings Growth',
            'Revenue Growth', 'Total Cash', 'Total Cash Per Share',
            'Total Debt', 'Current Ratio', 'Book Value Per Share',
            'Cash Flow', 'Beta', 'Held by Insiders',
            'Held by Institutions', 'Shares Short (as of', 'Short Ratio',
            'Short % of Float', 'Shares Short (prior ']
def performanceMargin(stock, sp500):  ###identify outperforming stocks
    """Return 1 when the stock beat the S&P 500 by more than the margin
    (15 percentage points), otherwise 0."""
    # performance margin, in percentage points
    margin = 15
    return 1 if (stock - sp500) > margin else 0
def buildDataSet():
    """Load the key-stats CSV and build the training data.

    :return: (X, y) where X is the scaled feature matrix and y is a list of
             0/1 labels from performanceMargin().
    """
    ###select dataset
    # data_df = pd.read_csv("key_stats_reduced_enhanced.csv", index_col=0, parse_dates=True)
    # pd.DataFrame.from_csv was removed in pandas 1.0; read_csv with
    # index_col=0 and parse_dates=True reproduces its defaults.
    data_df = pd.read_csv("key_stats_full_enhanced.csv", index_col=0, parse_dates=True)
    # shuffle rows so the train/test split is not ordered by date
    data_df = data_df.reindex(np.random.permutation(data_df.index))
    data_df = data_df.replace("NaN", 0).replace("N/A", 0)
    # label: 1 if the stock outperformed the S&P 500 by the margin
    data_df["Status2"] = list(map(performanceMargin, data_df["stock_p_change"], data_df["sp500_p_change"]))
    X = np.array(data_df[features].values)
    X = preprocessing.scale(X)
    y = data_df["Status2"].values.tolist()
    return X, y
def analysis():  ###function for generating outperforming stocks
    """Train a random forest on historical key stats and return the tickers
    from the forward sample that it predicts will outperform the S&P 500."""
    # hold out the last row as a (minimal) test split
    test_size = 1
    X, y = buildDataSet()
    # Create Model
    clf = RandomForestClassifier(max_features=None, oob_score=True)
    clf.fit(X[:-test_size], y[:-test_size])
    # pd.DataFrame.from_csv was removed in pandas 1.0; read_csv with
    # index_col=0 and parse_dates=True reproduces its defaults.
    data_df = pd.read_csv("forward_sample_full.csv", index_col=0, parse_dates=True)
    data_df = data_df.replace("NaN", 0).replace("N/A", 0)
    X = preprocessing.scale(np.array(data_df[features].values))
    Z = data_df["Ticker"].values.tolist()
    # Predict the whole matrix in one call: modern sklearn rejects the old
    # per-row clf.predict(X[i]) because a 1-D row is not a valid 2-D input,
    # and a single vectorized call is faster anyway.
    predictions = clf.predict(X)
    # return the list of tickers predicted to outperform
    return [ticker for ticker, p in zip(Z, predictions) if p == 1]
def getInvestments():  ###function for filtering ideal investments
    """Run analysis() several times and print the tickers that keep
    appearing, i.e. the most consistently predicted outperformers."""
    loops = 3
    picks = []
    # generate a consolidated list of outperforming stocks across runs
    for iteration in range(loops):
        print("Iteration: " + str(iteration + 1))
        picks.extend(analysis())
    x = Counter(picks)
    print(x)
    print('_' * 120)
    # keep tickers predicted in at least 3 runs, capped at 15 picks
    invest_in = []
    for ticker in x:
        if x[ticker] > 2 and len(invest_in) < 15:
            invest_in.append(ticker)
    print("Strategized Investments: " + str(sorted(invest_in)))
getInvestments()
| {
"repo_name": "aditya-pai/stockPrediction",
"path": "get_invests.py",
"copies": "1",
"size": "3365",
"license": "mit",
"hash": 3773312737656499700,
"line_mean": 26.5819672131,
"line_max": 105,
"alpha_frac": 0.667756315,
"autogenerated": false,
"ratio": 3.2957884427032322,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9229571328630779,
"avg_score": 0.04679468581449079,
"num_lines": 122
} |
from __future__ import print_function
from .common import isvec
from .common import ishomog
from math import sqrt
from numpy import trace
from .transforms import *
from ..tests.test_common import *
from .graphics import *
class Quaternion:
def __init__(self, s=None, v=None):
"""
A quaternion is a compact method of representing a 3D rotation that has
computational advantages including speed and numerical robustness.
A quaternion has 2 parts, a scalar s, and a vector v and is typically written::
q = s <vx vy vz>
A unit quaternion is one for which M{s^2+vx^2+vy^2+vz^2 = 1}.
A quaternion can be considered as a rotation about a vector in space where
q = cos (theta/2) sin(theta/2) <vx vy vz>
where <vx vy vz> is a unit vector.
:param s: scalar
:param v: vector
"""
if v is None:
self.v = np.matrix([[0, 0, 0]])
else:
assert isvec(v, 3)
self.v = v
if s is None:
self.s = 0
else:
assert type(s) is float or type(s) is int
self.s = s
@classmethod
def qt(cls, arg_in):
assert type(arg_in) is Quaternion
return cls(s=arg_in.s, v=arg_in.v)
@classmethod
def pure(cls, vec):
assert isvec(vec, 3)
return cls(s=0, v=vec)
def conj(self):
return Quaternion(s=self.s, v=-self.v)
def inv(self):
return Quaternion(s=self.s, v=-self.v)
def tr(self):
return t2r(self.r())
def norm(self):
"""Return the norm of this quaternion.
Code retrieved from: https://github.com/petercorke/robotics-toolbox-python/blob/master/robot/Quaternion.py
Original authors: Luis Fernando Lara Tobar and Peter Corke
@rtype: number
@return: the norm
"""
return np.linalg.norm(self.double())
def double(self):
"""Return the quaternion as 4-element vector.
Code retrieved from: https://github.com/petercorke/robotics-toolbox-python/blob/master/robot/Quaternion.py
Original authors: Luis Fernando Lara Tobar and Peter Corke
@rtype: 4-vector
@return: the quaternion elements
"""
return np.concatenate((np.matrix(self.s), self.v), 1)
def unit(self):
"""Return an equivalent unit quaternion
Code retrieved from: https://github.com/petercorke/robotics-toolbox-python/blob/master/robot/Quaternion.py
Original authors: Luis Fernando Lara Tobar and Peter Corke
@rtype: quaternion
@return: equivalent unit quaternion
"""
qr = UnitQuaternion()
nm = self.norm()
qr.s = float(self.s / nm)
qr.v = self.v / nm
return qr
def r(self):
"""Return an equivalent rotation matrix.
Code retrieved from: https://github.com/petercorke/robotics-toolbox-python/blob/master/robot/Quaternion.py
Original authors: Luis Fernando Lara Tobar and Peter Corke
@rtype: 3x3 orthonormal rotation matrix
@return: equivalent rotation matrix
"""
s = self.s
x = self.v[0, 0]
y = self.v[0, 1]
z = self.v[0, 2]
return np.matrix([[1 - 2 * (y ** 2 + z ** 2), 2 * (x * y - s * z), 2 * (x * z + s * y)],
[2 * (x * y + s * z), 1 - 2 * (x ** 2 + z ** 2), 2 * (y * z - s * x)],
[2 * (x * z - s * y), 2 * (y * z + s * x), 1 - 2 * (x ** 2 + y ** 2)]])
def matrix(self):
s = self.s
x = self.v[0, 0]
y = self.v[0, 1]
z = self.v[0, 2]
return np.matrix([[s, -x, -y, -z],
[x, s, -z, y],
[y, z, s, -x],
[z, -y, x, s]])
def __mul__(self, other):
assert isinstance(other, Quaternion) \
or isinstance(other, int) \
or isinstance(other, float), "Can be multiplied with Quaternion, int or a float. "
if type(other) is Quaternion:
qr = Quaternion()
else:
qr = UnitQuaternion()
if isinstance(other, Quaternion):
qr.s = self.s * other.s - self.v * np.transpose(other.v)
qr.v = self.s * other.v + other.s * self.v + np.cross(self.v, other.v)
elif type(other) is int or type(other) is float:
qr.s = self.s * other
qr.v = self.v * other
return qr
def __pow__(self, power, modulo=None):
"""
Code retrieved from: https://github.com/petercorke/robotics-toolbox-python/blob/master/robot/Quaternion.py
Original authors: Luis Fernando Lara Tobar and Peter Corke
:param power:
:param modulo:
:return:
"""
assert type(power) is int, "Power must be an integer"
qr = Quaternion()
q = Quaternion.qt(self)
for i in range(0, abs(power)):
qr = qr * q
if power < 0:
qr = qr.inv()
return qr
def __imul__(self, other):
"""
Code retrieved from: https://github.com/petercorke/robotics-toolbox-python/blob/master/robot/Quaternion.py
Original authors: Luis Fernando Lara Tobar and Peter Corke
:param other:
:return: self
"""
if isinstance(other, Quaternion):
s1 = self.s
v1 = self.v
s2 = other.s
v2 = other.v
# form the product
self.s = s1 * s2 - v1 * v2.T
self.v = s1 * v2 + s2 * v1 + np.cross(v1, v2)
elif type(other) is int or type(other) is float:
self.s *= other
self.v *= other
return self
def __add__(self, other):
assert isinstance(other, Quaternion), "Both objects should be of type: Quaternion"
return Quaternion(s=self.s + other.s, v=self.v + other.v)
def __sub__(self, other):
assert isinstance(other, Quaternion), "Both objects should be of type: Quaternion"
return Quaternion(s=self.s - other.s, v=self.v - other.v)
def __truediv__(self, other):
assert isinstance(other, Quaternion) or isinstance(other, int) or isinstance(other,
float), "Can be divided by a " \
"Quaternion, " \
"int or a float "
qr = Quaternion()
if type(other) is Quaternion:
qr = self * other.inv()
elif type(other) is int or type(other) is float:
qr.s = self.s / other
qr.v = self.v / other
return qr
def __eq__(self, other):
# assert type(other) is Quaternion
try:
np.testing.assert_almost_equal(self.s, other.s)
except AssertionError:
return False
if not matrices_equal(self.v, other.v, decimal=7):
return False
return True
def __ne__(self, other):
if self == other:
return False
else:
return True
def __repr__(self):
return "%f <%f, %f, %f>" % (self.s, self.v[0, 0], self.v[0, 1], self.v[0, 2])
def __str__(self):
return self.__repr__()
class UnitQuaternion(Quaternion):
    """A quaternion constrained to unit norm, i.e. a pure 3D rotation."""

    def __init__(self, s=None, v=None):
        """Default to the identity rotation (s=1, v=<0, 0, 0>)."""
        self.pipeline = None  # VTK pipeline used by animate(); created lazily
        if s is None:
            s = 1
        if v is None:
            v = np.matrix([[0, 0, 0]])
        super().__init__(s, v)

    @classmethod
    def rot(cls, arg_in):
        """Construct from a 3x3 rotation matrix."""
        qr = cls()
        return qr.tr2q(arg_in)

    @classmethod
    def qt(cls, arg_in):
        """Construct from another quaternion, normalising a plain Quaternion."""
        if type(arg_in) is Quaternion:
            arg_in = arg_in.unit()
        else:
            assert type(arg_in) is UnitQuaternion
        return cls(arg_in.s, arg_in.v)

    @classmethod
    def eul(cls, arg_in, unit='rad'):
        """Construct from a 3-vector of Euler angles."""
        assert isvec(arg_in, 3)
        return cls.rot(eul2r(phi=arg_in, unit=unit))

    @classmethod
    def rpy(cls, arg_in, unit='rad'):
        """Construct from roll-pitch-yaw angles."""
        return cls.rot(rpy2r(thetas=arg_in, unit=unit))

    @classmethod
    def angvec(cls, theta, v, unit='rad'):
        """Construct from angle *theta* about axis *v* (normalised here)."""
        assert isvec(v, 3)
        assert type(theta) is float or type(theta) is int
        uq = UnitQuaternion()
        if unit == 'deg':
            theta = theta * math.pi / 180
        uq.s = math.cos(theta / 2)
        uq.v = math.sin(theta / 2) * unitize(v)
        return uq

    @classmethod
    def omega(cls, w):
        """Construct from a rotation vector w: axis w/|w|, angle |w|."""
        assert isvec(w, 3)
        theta = np.linalg.norm(w)
        s = math.cos(theta / 2)
        v = math.sin(theta / 2) * unitize(w)
        return cls(s=s, v=v)

    @classmethod
    def Rx(cls, angle, unit='rad'):
        """Rotation about the x axis."""
        return cls.rot(rotx(angle, unit=unit))

    @classmethod
    def Ry(cls, angle, unit='rad'):
        """Rotation about the y axis."""
        return cls.rot(roty(angle, unit=unit))

    @classmethod
    def Rz(cls, angle, unit='rad'):
        """Rotation about the z axis."""
        return cls.rot(rotz(angle, unit=unit))

    @classmethod
    def vec(cls, arg_in):
        """Construct from a vector part only.

        NOTE(review): s = 1 - |v| is used here; an exactly-unit quaternion
        would need s = sqrt(1 - |v|^2) -- confirm this is intended.
        """
        assert isvec(arg_in, 3)
        s = 1 - np.linalg.norm(arg_in)
        return cls(s=s, v=arg_in)

    def new(self):
        """Return a copy of this unit quaternion."""
        return UnitQuaternion(s=self.s, v=self.v)

    def dot(self, omega):
        """Quaternion rate of change for angular velocity *omega*
        (E built with -skew(v))."""
        E = self.s * np.asmatrix(np.eye(3, 3)) - skew(self.v)
        qd = -self.v * omega
        return 0.5 * np.r_[qd, E * omega]

    def dotb(self, omega):
        """Quaternion rate of change for angular velocity *omega*
        (E built with +skew(v))."""
        E = self.s * np.asmatrix(np.eye(3, 3)) + skew(self.v)
        qd = -self.v * omega
        return 0.5 * np.r_[qd, E * omega]

    def plot(self):
        """Render this rotation as an SO3 pose plot."""
        from .pose import SO3
        SO3.np(self.r()).plot()

    def animate(self, qr=None, duration=5, gif=None):
        """Animate the rotation in VTK: identity -> self, or self -> *qr*.

        :param qr: optional destination rotation
        :param duration: animation length in seconds (60 steps per second)
        :param gif: optional gif file name passed to the VTK pipeline
        """
        self.pipeline = VtkPipeline(total_time_steps=duration * 60, gif_file=gif)
        axis = vtk.vtkAxesActor()
        axis.SetAxisLabels(0)
        self.pipeline.add_actor(axis)
        if qr is None:
            q1 = UnitQuaternion()
            q2 = self
        else:
            assert type(qr) is UnitQuaternion
            q1 = self
            q2 = qr
        cube_axes = axesCube(self.pipeline.ren)
        self.pipeline.add_actor(cube_axes)

        def execute(obj, event):
            # Advance one timer step and re-pose the axes actor at the
            # current slerp interpolation point.
            nonlocal axis
            self.pipeline.timer_tick()
            axis.SetUserMatrix(np2vtk(q1.interp(q2, r=1 / self.pipeline.total_time_steps * self.pipeline.timer_count).q2tr()))
            self.pipeline.iren.GetRenderWindow().Render()

        self.pipeline.iren.AddObserver('TimerEvent', execute)
        self.pipeline.animate()

    def matrix(self):
        # NOTE(review): deliberately overrides Quaternion.matrix() with a
        # no-op (returns None) -- confirm whether the 4x4 form is meant to
        # be unavailable for unit quaternions.
        pass

    def interp(self, qr, r=0.5, shortest=False):
        """Spherical linear interpolation (slerp) towards *qr*.

        Algorithm source: https://en.wikipedia.org/wiki/Slerp
        :param qr: UnitQuaternion to interpolate towards
        :param shortest: take the shortest path along the great circle
        :param r: interpolation parameter in [0, 1]
        :return: interpolated UnitQuaternion
        """
        assert type(qr) is UnitQuaternion
        if self == qr:
            return self
        q1 = self.double()
        q2 = qr.double()
        dot = q1 * np.transpose(q2)
        # If the dot product is negative, the quaternions have opposite
        # handed-ness and slerp won't take the shorter path. Fix by
        # reversing one quaternion.
        if shortest:
            if dot < 0:
                q1 = - q1
                dot = -dot
        dot = np.clip(dot, -1, 1)  # Clip within domain of acos()
        theta_0 = math.acos(dot)  # theta_0 = angle between input vectors
        theta = theta_0 * r  # theta = angle between v0 and result
        s1 = float(math.cos(theta) - dot * math.sin(theta) / math.sin(theta_0))
        s2 = math.sin(theta) / math.sin(theta_0)
        out = (q1 * s1) + (q2 * s2)
        return UnitQuaternion(s=float(out[0, 0]), v=out[0, 1:])

    def to_vec(self):
        """Return the vector part, sign-normalised so s would be >= 0."""
        if self.s < 0:
            return -self.v
        else:
            return self.v

    def to_rpy(self):
        """Convert to roll-pitch-yaw angles."""
        return tr2rpy(self.r())

    def to_angvec(self, unit='rad'):
        """Convert to (theta, axis); a zero axis is returned near identity.

        :param unit: 'rad' (default) or 'deg' for the returned angle
        """
        if np.linalg.norm(self.v) < 10 * np.spacing([1])[0]:
            # vector part is numerically zero: no well-defined axis
            vec = np.matrix([[0, 0, 0]])
            theta = 0
        else:
            # BUG FIX: the axis comes from the vector part. The old code
            # called unitize(vec) on the zero placeholder instead of self.v.
            vec = unitize(self.v)
            theta = 2 * math.atan2(np.linalg.norm(self.v), self.s)
        if unit == 'deg':
            theta = theta * 180 / math.pi
        return theta, vec

    def to_so3(self):
        """Convert to an SO3 pose object."""
        from .pose import SO3
        return SO3.np(self.r())

    def to_se3(self):
        """Convert to an SE3 pose object (zero translation)."""
        from .pose import SE3
        from .pose import SO3
        return SE3(so3=SO3.np(self.r()))

    def to_rot(self):
        """Return the equivalent 3x3 rotation matrix."""
        q = self.double()
        s = q[0, 0]
        x = q[0, 1]
        y = q[0, 2]
        z = q[0, 3]
        return np.matrix([[1 - 2 * (y ** 2 + z ** 2), 2 * (x * y - s * z), 2 * (x * z + s * y)],
                          [2 * (x * y + s * z), 1 - 2 * (x ** 2 + z ** 2), 2 * (y * z - s * x)],
                          [2 * (x * z - s * y), 2 * (y * z + s * x), 1 - 2 * (x ** 2 + y ** 2)]])

    def q2r(self):
        """Alias for to_rot()."""
        return self.to_rot()

    def q2tr(self):
        """Return the homogeneous 4x4 transform of this rotation."""
        return r2t(self.to_rot())

    @staticmethod
    def tr2q(t):
        """Convert a 3x3 rotation matrix to a UnitQuaternion.
        Original authors: Luis Fernando Lara Tobar and Peter Corke
        (robotics-toolbox-python).

        :param t: 3x3 rotation matrix
        :return: quaternion object
        """
        assert ishomog(t, (3, 3)), "Argument must be 3x3 homogeneous numpy matrix"
        qs = sqrt(trace(t) + 1) / 2.0
        kx = t[2, 1] - t[1, 2]  # Oz - Ay
        ky = t[0, 2] - t[2, 0]  # Ax - Nz
        kz = t[1, 0] - t[0, 1]  # Ny - Ox
        # Pick the numerically dominant diagonal term to avoid cancellation.
        if (t[0, 0] >= t[1, 1]) and (t[0, 0] >= t[2, 2]):
            kx1 = t[0, 0] - t[1, 1] - t[2, 2] + 1  # Nx - Oy - Az + 1
            ky1 = t[1, 0] + t[0, 1]  # Ny + Ox
            kz1 = t[2, 0] + t[0, 2]  # Nz + Ax
            add = (kx >= 0)
        elif t[1, 1] >= t[2, 2]:
            kx1 = t[1, 0] + t[0, 1]  # Ny + Ox
            ky1 = t[1, 1] - t[0, 0] - t[2, 2] + 1  # Oy - Nx - Az + 1
            kz1 = t[2, 1] + t[1, 2]  # Oz + Ay
            add = (ky >= 0)
        else:
            kx1 = t[2, 0] + t[0, 2]  # Nz + Ax
            ky1 = t[2, 1] + t[1, 2]  # Oz + Ay
            kz1 = t[2, 2] - t[0, 0] - t[1, 1] + 1  # Az - Nx - Oy + 1
            add = (kz >= 0)
        if add:
            kx = kx + kx1
            ky = ky + ky1
            kz = kz + kz1
        else:
            kx = kx - kx1
            ky = ky - ky1
            kz = kz - kz1
        kv = np.matrix([[kx, ky, kz]])
        nm = np.linalg.norm(kv)
        qr = UnitQuaternion()
        if nm == 0:
            # zero rotation: identity quaternion
            qr.s = 1.0
            qr.v = np.matrix([[0.0, 0.0, 0.0]])
        else:
            qr.s = qs
            qr.v = (sqrt(1 - qs ** 2) / nm) * kv
        return qr

    def __matmul__(self, other):
        """Product of two unit quaternions, renormalised to unit norm."""
        assert type(other) is UnitQuaternion
        return (self * other).unit()

    def __floordiv__(self, other):
        """Quotient of two unit quaternions, renormalised to unit norm."""
        assert type(other) is UnitQuaternion
        return (self / other).unit()
| {
"repo_name": "adityadua24/robopy",
"path": "robopy/base/quaternion.py",
"copies": "1",
"size": "15217",
"license": "mit",
"hash": 6086196403820201000,
"line_mean": 31.4456289979,
"line_max": 124,
"alpha_frac": 0.5072616153,
"autogenerated": false,
"ratio": 3.266852726492057,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4274114341792057,
"avg_score": null,
"num_lines": null
} |
__author__ = 'aditya'
import wbdata,datetime,time
from flask import session
from modules.rpy2_wrap import functions
def get_countries():
    """Return the wbdata country list, cached in the Flask session."""
    cached = session.get('countries')
    if cached is None:
        cached = wbdata.get_country(display=False)
        session['countries'] = cached
    return cached
def make_key(oldkey):
    """Build an '<ISO2>.<year>' key from a (country_name, datetime) pair.

    Falls back to the placeholder code 'XX' when the name is unknown.
    """
    year = oldkey[1].year
    # 'code' instead of the old 'id', which shadowed the builtin; the old
    # `if id:` guard was always true since 'XX' is truthy.
    code = 'XX'
    for country in get_countries():
        if oldkey[0] == country['name']:
            code = country['iso2Code']
    return code + '.' + str(year)
# ISO2 codes (plus a few jVectorMap-specific placeholders such as '_0', '-99')
# that the jVectorMap world map on the frontend is able to render; codes
# outside this set are filtered out by get_country_code().
jvector_countries = {'BD', 'BE', 'BF', 'BG', 'BA', 'BN', 'BO', 'JP', 'BI', 'BJ', 'BT', 'JM', 'BW', 'BR', 'BS', 'BY',
                     'BZ', 'RU', 'RW', 'RS', 'LT', 'LU', 'LR', 'RO', 'GW', 'GT', 'GR', 'GQ', 'GY', 'GE', 'GB', 'GA',
                     'GN', 'GM', 'GL', 'KW', 'GH', 'OM', '_1', '_0', 'JO', 'HR', 'HT', 'HU', 'HN', 'PR', 'PS', 'PT',
                     'PY', 'PA', 'PG', 'PE', 'PK', 'PH', 'PL', '-99', 'ZM', 'EH', 'EE', 'EG', 'ZA', 'EC', 'AL', 'AO',
                     'KZ', 'ET', 'ZW', 'ES', 'ER', 'ME', 'MD', 'MG', 'MA', 'UZ', 'MM', 'ML', 'MN', 'MK', 'MW', 'MR',
                     'UG', 'MY', 'MX', 'VU', 'FR', 'FI', 'FJ', 'FK', 'NI', 'NL', 'NO', 'NA', 'NC', 'NE', 'NG', 'NZ',
                     'NP', 'CI', 'CH', 'CO', 'CN', 'CM', 'CL', 'CA', 'CG', 'CF', 'CD', 'CZ', 'CY', 'CR', 'CU', 'SZ',
                     'SY', 'KG', 'KE', 'SS', 'SR', 'KH', 'SV', 'SK', 'KR', 'SI', 'KP', 'SO', 'SN', 'SL', 'SB', 'SA',
                     'SE', 'SD', 'DO', 'DJ', 'DK', 'DE', 'YE', 'AT', 'DZ', 'US', 'LV', 'UY', 'LB', 'LA', 'TW', 'TT',
                     'TR', 'LK', 'TN', 'TL', 'TM', 'TJ', 'LS', 'TH', 'TF', 'TG', 'TD', 'LY', 'AE', 'VE', 'AF', 'IQ',
                     'IS', 'IR', 'AM', 'IT', 'VN', 'AR', 'AU', 'IL', 'IN', 'TZ', 'AZ', 'IE', 'ID', 'UA', 'QA', 'MZ'}
def get_country_code(name):
    """Map a country display name to its ISO2 code.

    Returns the code only if the jVectorMap frontend can render it,
    otherwise None.
    """
    for country in get_countries():
        if name == country['name'] and country['iso2Code'] in jvector_countries:
            return country['iso2Code']
    return None
def get_data(from_date=datetime.datetime(2010, 1, 1), to_date=None, variable="FR.INR.LEND"):
    """Fetch a World Bank indicator series over a date range.

    :param from_date: start of the range (datetime)
    :param to_date: end of the range; defaults to "now", resolved per call
    :param variable: World Bank indicator code (case-insensitive)
    """
    # BUG FIX: the old default `to_date=datetime.datetime.now()` was
    # evaluated once at import time, so a long-running server would keep
    # serving a stale end date. Resolve "now" on every call instead.
    if to_date is None:
        to_date = datetime.datetime.now()
    duration = (from_date, to_date)
    variable = variable.upper()
    return functions.get_values(wbdata.get_data(variable, data_date=duration))
    # Session-based caching was removed deliberately: it attached the whole
    # payload to every message as a cookie and slowed down the system.
    # if mykey not in session:
    #     session[mykey] = functions.get_values(wbdata.get_data(variable, data_date=duration))
    # return session[mykey]
def get_categories():
    """Return (id, name) tuples for every World Bank data source."""
    return [(source['id'], source['name']) for source in wbdata.get_source(display=False)]
def get_indicators(i, name):
    """Return the indicators for source *i*.

    The *name* argument is unused but kept for caller compatibility.
    """
    return wbdata.get_indicator(source=i, display=False)
| {
"repo_name": "ca9/world-stats",
"path": "modules/opendataGet/functions.py",
"copies": "1",
"size": "2783",
"license": "mit",
"hash": -3853137439785723400,
"line_mean": 46.9827586207,
"line_max": 117,
"alpha_frac": 0.479338843,
"autogenerated": false,
"ratio": 2.7581764122893953,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.37375152552893953,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Aditya Vikram Agarwal'
import pygame
from pygame.locals import *
from random import randint
import board
import scoreboard
class Game:
    """Top-level pygame game object: owns the window, assets, music and the
    main loop. run() returns -1 to quit the program or 0 to restart."""
    def __init__(self, background, quitimage, yes_image, no_image, restart_image):
        """Create the 1200x620 window, load and scale all UI images from the
        given file paths, and start the background music."""
        pygame.font.init()
        self.screen = pygame.display.set_mode((1200, 620), DOUBLEBUF)
        self.screen.set_alpha(None)
        self.clock = pygame.time.Clock()
        self.background = pygame.image.load(background)
        self.quitimage = pygame.image.load(quitimage)
        self.yesimage = pygame.image.load(yes_image)
        self.noimage = pygame.image.load(no_image)
        self.restart = pygame.image.load(restart_image)
        self.background = pygame.transform.scale(self.background, (1200, 620))
        self.quitimage = pygame.transform.scale(self.quitimage, (400, 60))
        self.yesimage = pygame.transform.scale(self.yesimage, (50, 50))
        self.noimage = pygame.transform.scale(self.noimage, (50, 50))
        self.restart = pygame.transform.scale(self.restart, (400, 60))
        self.screen.blit(self.background, self.background.get_rect())
        pygame.mixer.init()
        pygame.mixer.music.load("Sound/GameMusic.mp3")
        pygame.mixer.music.play()
        pygame.display.flip()
    def run(self):  # The function which runs the game
        """Main game loop at 30 FPS.

        :return: -1 when the player chooses to quit the program,
                 0 when the finished game should be followed by a new one.
        """
        board1 = board.Board(self.screen,1)
        # Key-hold flags: 1 while the corresponding direction key is pressed.
        stateright = 0
        stateleft = 0
        stateup = 0
        statedown = 0
        jumpstate = 0  # 0 = grounded, 1 = ascending, 2 = descending
        timer = 0  # frames since the last fireball spawn
        jumpspeed = 0
        donkeytimer = 0  # the donkey sprite advances once every 10 frames
        # (min, max) frame gap between fireball spawns; tightened on each win
        fireballfrequency = (60, 80)
        lim = randint(fireballfrequency[0], fireballfrequency[1])
        scoreboard1 = scoreboard.ScoreBoard("Images/scoreboard.png", board1.getPlayerScore(), self.screen,
                                            "Images/liveplayer.png")
        while 1:
            quitstate = 0
            fireballhitme = 0  # set to 1 when hit by a fireball this frame
            self.screen.set_alpha(None)
            # Spawn a fireball after a random number of frames has elapsed.
            if timer == lim:
                board1.createfireball()
                timer = 0
                lim = randint(fireballfrequency[0], fireballfrequency[1])
            timer += 1
            prevScore = board1.getPlayerScore()
            ladderstate = board1.getLadderCollisions()
            collectCoin = board1.getCoinCollisions()
            self.clock.tick(30)
            pygame.key.set_repeat()
            # Translate pygame events into the key-hold flags above.
            for ev in pygame.event.get():
                if ev.type == QUIT:
                    quitstate = 1
                if not hasattr(ev, 'key'):
                    continue
                if ev.type == KEYDOWN and (ev.key == K_RIGHT or ev.key == K_d):
                    stateright = 1
                if ev.type == KEYUP and (ev.key == K_RIGHT or ev.key == K_d):
                    stateright = 0
                if ev.type == KEYDOWN and (ev.key == K_LEFT or ev.key == K_a):
                    stateleft = 1
                if ev.type == KEYUP and (ev.key == K_LEFT or ev.key == K_a):
                    stateleft = 0
                if ev.type == KEYDOWN and (ev.key == K_UP or ev.key == K_w):
                    stateup = 1
                if ev.type == KEYUP and (ev.key == K_UP or ev.key == K_w):
                    stateup = 0
                if ev.type == KEYDOWN and (ev.key == K_DOWN or ev.key == K_s):
                    statedown = 1
                if ev.type == KEYUP and (ev.key == K_DOWN or ev.key == K_s):
                    statedown = 0
                if ev.type == KEYDOWN and ev.key == K_SPACE and jumpstate == 0:
                    jumpstate = 1
                    jumpspeed = 10
                if ev.key == K_ESCAPE:
                    quitstate = 1
            if quitstate == 1:
                if self.confirmquit() == 1:
                    return -1
            board1.updatefireballs()
            donkeytimer += 1
            if donkeytimer == 10:
                donkeytimer = 0
                board1.updatedonkey(1)
            else:
                board1.updatedonkey(0)
            # Each movement branch below re-checks fireball collisions:
            # dead == 0 appears to mean game over (prompt for restart),
            # dead == 1 means hit but lives remain -- TODO confirm in Board.
            if stateup == 1 and ladderstate == 1:
                board1.key_pressed(3)
                dead = board1.checkfireballcollision()
                if dead == 0:
                    if self.askforrestart(board1, scoreboard1) == 1:
                        return -1
                    else:
                        return 0
                elif dead == 1:
                    fireballhitme = 1
                    jumpstate = 0
            if statedown == 1 and ladderstate == 1:
                board1.key_pressed(4)
                dead = board1.checkfireballcollision()
                if dead == 0:
                    if self.askforrestart(board1, scoreboard1) == 1:
                        return -1
                    else:
                        return 0
                elif dead == 1:
                    fireballhitme = 1
                    jumpstate = 0
            if ladderstate == 1:
                board1.checkplayerlevel()
                dead = board1.checkfireballcollision()
                if dead == 0:
                    if self.askforrestart(board1, scoreboard1) == 1:
                        return -1
                    else:
                        return 0
                elif dead == 1:
                    fireballhitme = 1
                    jumpstate = 0
            if stateright == 1:
                board1.key_pressed(1)
                dead = board1.checkfireballcollision()
                if dead == 0:
                    if self.askforrestart(board1, scoreboard1) == 1:
                        return -1
                    else:
                        return 0
                elif dead == 1:
                    fireballhitme = 1
                    jumpstate = 0
            if stateleft == 1:
                board1.key_pressed(2)
                dead = board1.checkfireballcollision()
                if dead == 0:
                    if self.askforrestart(board1, scoreboard1) == 1:
                        return -1
                    else:
                        return 0
                elif dead == 1:
                    fireballhitme = 1
                    jumpstate = 0
            if stateright == 0 and stateleft == 0 and ladderstate == 0:
                board1.setPlayerstraight()
            # Grabbing a ladder cancels a jump in progress.
            if ladderstate == 1 and jumpstate == 1:
                jumpstate = 0
            if jumpstate == 1:
                # Ascending phase: decelerate by 2 per frame until the apex.
                if board1.playerjump(jumpspeed) == 1:
                    jumpstate = 2
                    jumpspeed = 0
                else:
                    jumpspeed -= 2
                dead = board1.checkfireballcollision()
                if dead == 0:
                    if self.askforrestart(board1, scoreboard1) == 1:
                        return -1
                    else:
                        return 0
                elif dead == 1:
                    fireballhitme = 1
                    jumpstate = 0
            if jumpstate == 2:
                # Descending phase: accelerate by 2 per frame until landing.
                if board1.playerjumpdown(jumpspeed) == 1:
                    jumpstate = 0
                    jumpspeed = 10
                else:
                    jumpspeed += 2
                dead = board1.checkfireballcollision()
                if dead == 0:
                    if self.askforrestart(board1, scoreboard1) == 1:
                        return -1
                    else:
                        return 0
                elif dead == 1:
                    fireballhitme = 1
                    jumpstate = 0
            # Gravity: not on a ladder and not jumping -> fall.
            if ladderstate == 0 and jumpstate == 0:
                board1.dropplayer()
                dead = board1.checkfireballcollision()
                if dead == 0:
                    if self.askforrestart(board1, scoreboard1) == 1:
                        return -1
                    else:
                        return 0
                elif dead == 1:
                    fireballhitme = 1
                    jumpstate = 0
            board1.checkMidAir()
            # Level cleared: reset the board, award a coin bonus and make
            # fireballs spawn faster and move quicker.
            if board1.checkwin() == 1:
                board1.respawnPlayer()
                board1.initcoins(self.screen)
                board1.setplayerlives()
                board1.setplayerlives()  # NOTE(review): called twice -- confirm whether two lives are intended
                collectCoin = 10
                lower, upper = fireballfrequency
                fireballfrequency = (max(0, lower - 20),max(0, upper - 20))
                board1.boostfireball()
                board1.killfireballs()
                board1.upgradeplayerlevel()
            # Redraw the frame: background, board, then scoreboard widgets.
            self.screen.blit(self.background, self.background.get_rect())
            board1.update(self.screen)
            # Score: +5 per coin collected, -25 per fireball hit, floored at 0.
            board1.setPlayerScore(max(0, prevScore + collectCoin * 5 - fireballhitme * 25))
            scoreboard1.update(board1.getPlayerScore(), self.screen)
            scoreboard1.update_lives(self.screen, board1.getPlayerLives())
            scoreboard1.update_level(board1.getplayerlevel(), self.screen)
            pygame.display.flip()
    def confirmquit(self):  # Ask if player really wants to quit
        """Show the quit dialog; return 1 for yes (quit) and 0 for no."""
        while 1:
            self.screen.blit(self.quitimage, (400, 200))
            yes = self.screen.blit(self.yesimage, (500, 300))
            no = self.screen.blit(self.noimage, (650, 300))
            for event in pygame.event.get():
                if event.type == pygame.MOUSEBUTTONDOWN:
                    pos = pygame.mouse.get_pos()
                    if yes.collidepoint(pos):
                        return 1
                    elif no.collidepoint(pos):
                        return 0
            pygame.display.flip()
    def askforrestart(self, board1, scoreboard1):  # Ask if player wants to restart
        """Show the restart dialog over the final frame.

        Note the inverted return values compared to confirmquit():
        0 means restart (yes clicked), 1 means quit (no clicked).
        """
        self.screen.blit(self.background, self.background.get_rect())
        scoreboard1.update(board1.getPlayerScore(), self.screen)
        board1.update(self.screen)
        pygame.display.flip()
        while 1:
            self.screen.blit(self.restart, (400, 200))
            yes = self.screen.blit(self.yesimage, (500, 300))
            no = self.screen.blit(self.noimage, (650, 300))
            for event in pygame.event.get():
                if event.type == pygame.MOUSEBUTTONDOWN:
                    pos = pygame.mouse.get_pos()
                    if yes.collidepoint(pos):
                        return 0
                    elif no.collidepoint(pos):
                        return 1
            pygame.display.flip()
if __name__ == '__main__':
    # Keep launching fresh games until run() reports a program quit (-1).
    while True:
        game = Game('Images/background.jpg', 'Images/areyousure.png', 'Images/yes.png', 'Images/no.png',
                    'Images/restart.png')
        if game.run() == -1:
            break
| {
"repo_name": "adityavagarwal/DonkeyKong",
"path": "startGame.py",
"copies": "1",
"size": "10585",
"license": "mpl-2.0",
"hash": 8560075543095723000,
"line_mean": 38.2037037037,
"line_max": 106,
"alpha_frac": 0.4752007558,
"autogenerated": false,
"ratio": 4.283690813435856,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0003317677454580936,
"num_lines": 270
} |
__author__ = 'Aditya Vikram Agarwal'
import pygame
from random import randint
import random
import player
import princess
import donkey
import block
import fireball
import coin
import ladder
class Board:
    """Game board: owns every sprite (blocks, ladders, coins, fireballs,
    donkey, princess, player) and all per-frame game-rule updates.

    Coordinates are screen pixels; the playfield is ~1200x500 with walkable
    log levels at y = 480, 400, 320, 240, 160, 80.
    """
    def __init__(self, screen, testmode):
        # testmode: 1 = game mode (sprites are drawn), 0 = test mode (no drawing).
        self.MODE = testmode
        self.blocks = []
        self.ladders = []
        self.coins = []
        self.fireballs = []
        self.castleblocks = []
        self.levellimits = {}
        self.ladderlimits = {}
        self.donkey = None
        self.princess = None
        self.donkey_group = []
        self.princess_group = []
        # start defining Constants here
        self.PLAYER_SPEED = 10
        self.PLAYER_CLIMB_SPEED = 5
        self.FULL_LADDER_HEIGHT = 95
        self.LADDER_WIDTH = 30
        self.HALF_LADDER_HEIGHT = 35
        self.PLAYER_HEIGHT = 20
        self.PLAYER_WIDTH = 20
        self.COIN_WIDTH = 20
        self.COIN_HEIGHT = 20
        self.COIN_LEVELS = [470, 390, 310, 230, 150, 70]
        self.FIREBALL_HEIGHT = 25
        self.FIREBALL_WIDTH = 25
        self.FIREBALL_SPEED = 5
        self.JUMP_LIMIT = 30
        self.PLAYER_SPAWN_LEVEL = 480
        self.DONKEY_SPEED = 3
        self.PLAYER_DROP_LEVEL = None
        # End defining constants
        self.block_group = pygame.sprite.RenderPlain(*self.blocks)
        self.ladder_group = pygame.sprite.RenderPlain(*self.ladders)
        self.coin_group = pygame.sprite.RenderPlain(*self.coins)
        self.fireball_group = pygame.sprite.RenderPlain(*self.fireballs)
        # NOTE(review): built from self.blocks (empty at this point), not
        # self.castleblocks; initcastle() below rebuilds it correctly, so this
        # is only a placeholder -- confirm it was not meant to be castleblocks.
        self.castle_block_group = pygame.sprite.RenderPlain(*self.blocks)
        self.initlogs(screen)
        self.initladders(screen)
        self.initcoins(screen)
        self.initdonkey(screen)
        self.initprincess(screen)
        self.initcastle(screen)
        self.plr = [player.Player("Images/player2.png", "Images/player.png", "Images/player3.png", "Images/player4.png",
                                  (0, self.PLAYER_SPAWN_LEVEL), self.PLAYER_WIDTH, self.PLAYER_HEIGHT, 0, 2)]
        self.plr_group = pygame.sprite.RenderPlain(*self.plr)
        if(self.MODE == 1):
            self.plr_group.draw(screen)
        # Map any y coordinate (1..500) to the nearest walkable level at or
        # below it; used to know where a falling player/fireball must land.
        self.playerparentdict = {}
        self.fireballparentdict = {}
        self.playerparentdict[500] = self.PLAYER_SPAWN_LEVEL
        for i in range(499, 0, -1):  # Player's regular positions in each level
            if i in [480, 400, 320, 240, 160, 80]:
                self.playerparentdict[i] = i
            else:
                self.playerparentdict[i] = self.playerparentdict[i + 1]
        self.fireballparentdict[500] = self.PLAYER_SPAWN_LEVEL
        for i in range(499, 0, -1):  # Fireballs' regular positions in each level
            if i in [480, 400, 320, 240, 160, 80]:
                self.fireballparentdict[i] = i
            else:
                self.fireballparentdict[i] = self.fireballparentdict[i + 1]
    def initlogs(self, screen):  # Initialize all blocks
        """Create the horizontal log platforms and the level-edge table.

        levellimits maps a log level's y to its open-edge code:
        1 = open on the right, 2 = open on the left, 3 = castle level.
        """
        self.levellimits = {400: 1, 320: 2, 240: 1, 160: 2, 80: 1, 30: 3}
        self.blocks = [block.Block("Images/log.png", "Images/log.png", (0, 0), 1200, 20),
                       block.Block("Images/log.png", "Images/log.png", (0, 100), 700, 20),
                       block.Block("Images/log.png", "Images/log.png", (200, 180), 1000, 20),
                       block.Block("Images/log.png", "Images/log.png", (0, 260), 1000, 20),
                       block.Block("Images/log.png", "Images/log.png", (200, 340), 1000, 20),
                       block.Block("Images/log.png", "Images/log.png", (0, 420), 1000, 20),
                       block.Block("Images/log.png", "Images/log.png", (0, 500), 1200, 20),
                       ]
        self.block_group = pygame.sprite.RenderPlain(*self.blocks)
        if(self.MODE == 1):  # 1 implies game mode, 0 implies test mode
            self.block_group.draw(screen)
    def initdonkey(self, screen):  # Initialize donkey
        """Create the donkey sprite on the top platform."""
        self.donkey = donkey.Donkey("Images/Donkey2.png", "Images/Donkey.png", (20, 50), 40, 50, 0)
        self.donkey_group = pygame.sprite.RenderPlain(self.donkey)
        if(self.MODE == 1):
            self.donkey_group.draw(screen)
    def initprincess(self, screen):  # Initialize princess
        """Create the princess sprite inside the castle."""
        self.princess = princess.Princess("Images/princess2.png", "Images/princess2.png", (120, 20), 20, 30, 0)
        self.princess_group = pygame.sprite.RenderPlain(self.princess)
        if(self.MODE == 1):
            self.princess_group.draw(screen)
    def initladders(self, screen):  # Initialize all ladders
        """Create full, broken (two halves) and castle ladders.

        ladderlimits maps each ladder's top-left position to the lowest y the
        player may reach while on that ladder (keyed by ladder height/offset).
        """
        self.ladders = [ladder.Ladder("Images/ladder.png", "Images/ladder.png", (800, 419), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder.png", "Images/ladder.png", (300, 339), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder.png", "Images/ladder.png", (500, 259), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder.png", "Images/ladder.png", (900, 179), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder.png", "Images/ladder.png", (600, 99), self.LADDER_WIDTH,
                                      self.FULL_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken.png", "Images/ladder_broken.png", (650, 335),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken_down.png", "Images/ladder_broken_down.png", (650, 400),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken.png", "Images/ladder_broken.png", (850, 255),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken_down.png", "Images/ladder_broken_down.png", (850, 320),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken.png", "Images/ladder_broken.png", (300, 95),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/ladder_broken_down.png", "Images/ladder_broken_down.png", (300, 160),
                                      self.LADDER_WIDTH, self.HALF_LADDER_HEIGHT),
                        ladder.Ladder("Images/castleladder.png", "Images/castleladder.png", (220, 45),
                                      self.LADDER_WIDTH, ((self.FULL_LADDER_HEIGHT - 5) * 2) / 3)
                        ]
        for l in self.ladders:
            x, y = l.getPosition()
            w, h = l.getSize()
            if h == self.FULL_LADDER_HEIGHT:
                self.ladderlimits[l.getPosition()] = y + 1 + 60
            else:
                if h == ((self.FULL_LADDER_HEIGHT - 5) * 2) / 3:
                    # castle ladder
                    self.ladderlimits[l.getPosition()] = y + 5 + 30
                elif y % 10 == 0:
                    # lower half of a broken ladder (y values end in 0)
                    self.ladderlimits[l.getPosition()] = y
                else:
                    # upper half of a broken ladder
                    self.ladderlimits[l.getPosition()] = y + 5 + 60
        self.ladder_group = pygame.sprite.RenderPlain(*self.ladders)
        if(self.MODE == 1):
            self.ladder_group.draw(screen)
    def initcoins(self, screen):  # Initialize all coins
        """Scatter 20 coins at random x positions on random coin levels.

        The x range depends on the level so coins stay on the platform.
        """
        self.coins = []
        x = 0
        for i in range(0, 20):
            y = self.COIN_LEVELS[randint(0, 5)]
            if y == 470:
                x = random.randrange(0, 1170, 30)
            elif y in [390, 230]:
                x = random.randrange(0, 1000, 30)
            elif y in [310, 150]:
                x = random.randrange(200, 1170, 30)
            elif y == 70:
                x = random.randrange(350, 700, 30)
            self.coins += [coin.Coin("Images/coin.png", "Images/coin.png", (x, y), self.COIN_WIDTH, self.COIN_HEIGHT)]
        self.coin_group = pygame.sprite.RenderPlain(*self.coins)
        if(self.MODE == 1):
            self.coin_group.draw(screen)
    def initcastle(self, screen):
        """Create the castle (roof + two pillars) at the top of the board."""
        self.castleblocks = [block.Block("Images/castle.png", "Images/castle.png", (110, 50), 180, 10),
                             block.Block("Images/castlepillar.png", "Images/castlepillar.png", (100, 20), 20, 40),
                             block.Block("Images/castlepillar.png", "Images/castlepillar.png", (280, 20), 20, 40),
                             ]
        self.castle_block_group = pygame.sprite.RenderPlain(*self.castleblocks)
        if(self.MODE == 1):
            self.castle_block_group.draw(screen)
    def createfireball(self):  # Creating fireballs
        """Spawn a new fireball near donkey's x position with a random direction."""
        donkeyx, donkeyy = self.donkey.getPosition()
        self.fireballs += [fireball.Fireball("Images/fireball.png", "Images/fireball.png", (donkeyx + 5, 80),
                                             self.FIREBALL_WIDTH, self.FIREBALL_HEIGHT, randint(1, 2))]
        self.fireball_group = pygame.sprite.RenderPlain(*self.fireballs)
    def key_pressed(self, event):  # Handling a key pressed event
        """Move the player: event 1 = right, 2 = left, 3 = up, 4 = down.

        Position is clamped to the playfield.
        NOTE(review): moving down uses PLAYER_SPEED (10) while moving up uses
        PLAYER_CLIMB_SPEED (5) -- confirm the asymmetry is intended.
        """
        x, y = self.plr[0].getPosition()
        if event == 1:
            self.plr[0].setState(0)
            x += self.PLAYER_SPEED
        if event == 2:
            self.plr[0].setState(1)
            x -= self.PLAYER_SPEED
        if event == 3:
            y -= self.PLAYER_CLIMB_SPEED
        if event == 4:
            y += self.PLAYER_SPEED
        x = max(x, 0)
        y = max(y, 0)
        x = min(x, 1170)
        y = min(y, self.PLAYER_SPAWN_LEVEL)
        self.plr[0].setPosition((x, y))
    def checkMidAir(self):  # Detecting that player should drop beyond block limits
        """Nudge the player downward when standing past the open edge of a log."""
        x, y = self.plr[0].getPosition()
        if y == 80 and x > 700:
            y += 0.1 * self.PLAYER_SPEED
        if y in self.levellimits and int(self.levellimits[y]) == 1 and x > 1000:
            y += 0.1 * self.PLAYER_SPEED
        if y in self.levellimits and int(self.levellimits[y]) == 2 and x < 170:
            y += 0.1 * self.PLAYER_SPEED
        self.plr[0].setPosition((x, y))
    def update(self, screen):  # Update the board
        """Redraw every sprite group (only in game mode)."""
        if(self.MODE == 1):
            self.coin_group.draw(screen)
            self.block_group.draw(screen)
            self.castle_block_group.draw(screen)
            self.ladder_group.draw(screen)
            screen.blit(self.donkey.image, self.donkey.getPosition())
            self.fireball_group.draw(screen)
            self.princess_group.draw(screen)
            self.plr_group.draw(screen)
    def getLadderCollisions(self):  # Check if player is in touch with any ladder
        """Return 1 and snap the player onto a ladder if one is touched, else 0.

        NOTE(review): rect1/rect2 alias and mutate the sprites' own rects.
        NOTE(review): castleladder is (220, 50) here but the castle ladder is
        created at (220, 45) in initladders, so its height override never
        matches -- confirm.
        """
        state = 0
        broken_ladders = [(650, 335), (650, 400), (850, 255), (850, 320), (300, 95), (300, 160)]
        castleladder = (220, 50)
        for s in self.ladder_group.sprites():
            rect1 = self.plr[0].rect
            rect1.topleft = self.plr[0].getPosition()
            playerx, playery = rect1.topleft
            rect1.height = self.PLAYER_HEIGHT
            rect1.width = self.PLAYER_WIDTH
            rect2 = s.rect
            ladderx, laddery = s.rect.topleft
            rect2.height = self.FULL_LADDER_HEIGHT
            rect2.width = self.LADDER_WIDTH
            if rect2.topleft == castleladder:
                rect2.height = ((self.FULL_LADDER_HEIGHT - 5) * 2) / 3
            if rect2.topleft in broken_ladders:
                rect2.height = self.HALF_LADDER_HEIGHT
            if rect1.colliderect(rect2):
                # Only snap while between levels (i.e. actually climbing).
                if playery not in self.levellimits and playery != self.PLAYER_SPAWN_LEVEL:
                    self.plr[0].setPosition((ladderx + 5, playery))
                    self.plr[0].setState(2)
                state = 1
                break
        if state == 1:
            return 1
        else:
            return 0
    def checkfireballcollision(self):  # Check if player is dead and respawn
        """Handle player/fireball hits.

        Returns 0 when the player is out of lives (game over), 1 when a life
        was lost and the player respawned, -1 when nothing was hit.
        """
        for s in self.fireball_group.sprites():
            rect1 = self.plr[0].rect
            rect1.topleft = self.plr[0].getPosition()
            rect1.height = rect1.width = 20
            rect2 = s.rect
            rect2.height = self.FIREBALL_HEIGHT
            rect2.width = self.FIREBALL_WIDTH
            if rect1.colliderect(rect2):
                if self.plr[0].getLives() == 0:
                    return 0
                else:
                    self.respawnPlayer()
                    self.plr[0].setState(0)
                    self.plr[0].setLives(self.plr[0].getLives() - 1)
                    return 1
        return -1
    def dropplayer(self):  # Drop if player is in middle of air
        """Move the player 10px toward the walkable level below, if airborne."""
        x, y = self.plr[0].getPosition()
        levelpos = y
        levelpos = min(self.PLAYER_SPAWN_LEVEL, levelpos)
        levelpos = self.playerparentdict[levelpos]
        if y == levelpos:
            return
        self.plr[0].setPosition((x, min(y + 10, levelpos)))
    def getCoinCollisions(self):  # Checking collisions with any coin
        """Remove the first coin touching the player; return 1 if one was taken."""
        for c in self.coin_group.sprites():
            rect1 = self.plr[0].rect
            rect1.topleft = self.plr[0].getPosition()
            rect1.height = self.PLAYER_HEIGHT
            rect1.width = self.PLAYER_WIDTH
            rect2 = c.rect
            rect2.height = self.COIN_HEIGHT
            rect2.width = self.COIN_WIDTH
            if rect1.colliderect(rect2):
                c.kill()
                return 1
        return 0
    def playerjump(self, jumpspeed):  # Jumping up function
        """Raise the player by jumpspeed; return 1 once JUMP_LIMIT is reached.

        Remembers the take-off level in PLAYER_DROP_LEVEL so the descent in
        playerjumpdown() can land back on it.
        """
        x, y = self.plr[0].getPosition()
        levelpos = y
        levelpos = min(self.PLAYER_SPAWN_LEVEL, levelpos)
        levelpos = self.playerparentdict[levelpos]
        if y == levelpos:
            self.PLAYER_DROP_LEVEL = y
        if y <= levelpos - self.JUMP_LIMIT:
            self.plr[0].setPosition((x, levelpos - self.JUMP_LIMIT))
            return 1
        else:
            self.plr[0].setPosition((x, y - jumpspeed))
            return 0
    def playerjumpdown(self, jumpspeed):  # Jumping down function
        """Lower the player by jumpspeed; return 1 once a level is reached."""
        x, y = self.plr[0].getPosition()
        levelpos = y
        if self.PLAYER_DROP_LEVEL:
            # Land on the level the jump started from, then clear it.
            if min(levelpos, self.PLAYER_DROP_LEVEL) == self.PLAYER_DROP_LEVEL:
                levelpos = min(levelpos, self.PLAYER_DROP_LEVEL)
                self.PLAYER_DROP_LEVEL = None
        levelpos = self.playerparentdict[levelpos]
        if y >= levelpos:
            self.plr[0].setPosition((x, levelpos))
            return 1
        else:
            self.plr[0].setPosition((x, y + jumpspeed))
            return 0
    def checkplayerlevel(self):  # checks that player should not fall down beyond ladder through a block
        """Clamp the player's y to the bottom limit of the ladder being used."""
        x, y = self.plr[0].getPosition()
        for s in self.ladder_group.sprites():
            rect1 = self.plr[0].rect
            rect1.topleft = self.plr[0].getPosition()
            rect1.height = self.PLAYER_HEIGHT
            rect1.width = self.PLAYER_WIDTH
            rect2 = s.rect
            if rect1.colliderect(rect2):
                y = min(y, self.ladderlimits[rect2.topleft])
                self.plr[0].setPosition((x, y))
                break
    def updatefireballs(self):  # Update fireball positions and directions
        """Advance every fireball one tick.

        States: 1 = moving right, 2 (or 0) = moving left, 3 = falling.
        Fireballs parked at the bottom-left corner are skipped and then
        removed by the trailing `del self.fireballs[i:]` (they are never
        copied back into the list).
        """
        i = 0
        for s in self.fireball_group.sprites():
            x, y = s.getPosition()
            if x <= 0 and y == self.PLAYER_SPAWN_LEVEL:
                pass
            else:
                state = s.getState()
                if x <= 0:
                    state = 1
                if x >= 1180:
                    state = 2
                if state != 3:
                    if state == 1:
                        x += self.FIREBALL_SPEED
                    else:
                        x -= self.FIREBALL_SPEED
                    collisions = pygame.sprite.spritecollide(s, self.ladder_group, False)
                    if collisions:
                        ly = self.ladderlimits[collisions[0].rect.topleft]
                        ladderx, laddery = collisions[0].rect.topleft
                        if y != ly:
                            # 10% chance to start sliding down this ladder.
                            val = randint(1, 10)
                            if val == 5:
                                y += 2 * self.FIREBALL_SPEED
                                x = ladderx
                                state = 3
                    # Fall off the open edges of the platforms.
                    if y == 80 and x > 700:
                        y += 2 * self.FIREBALL_SPEED
                        state = 3
                    if y in self.levellimits and int(self.levellimits[y]) == 1 and x > 1000:
                        y += 2 * self.FIREBALL_SPEED
                        state = 3
                    if y in self.levellimits and int(self.levellimits[y]) == 2 and x < 170:
                        y += 2 * self.FIREBALL_SPEED
                        state = 3
                else:
                    # Falling: descend until the level below is reached,
                    # then pick a random horizontal direction.
                    y = min(self.fireballparentdict[y], y + 2 * self.FIREBALL_SPEED)
                    if self.fireballparentdict[y] == y:
                        state = randint(0, 1)
                self.fireballs[i] = fireball.Fireball("Images/fireball.png", "Images/fireball.png", (x, y),
                                                      self.FIREBALL_WIDTH, self.FIREBALL_HEIGHT, state)
                i += 1
        del self.fireballs[i:]
        self.fireball_group = pygame.sprite.RenderPlain(*self.fireballs)
    def updatedonkey(self, flipdonkey):  # Update donkey position and direction
        """Walk donkey back and forth between x=0 and x=180; flipdonkey toggles its animation state."""
        self.donkey.setState(self.donkey.getState() ^ flipdonkey)
        direction = self.donkey.getdirection()
        x, y = self.donkey.getPosition()
        if x >= 180:
            direction = 1
        if x <= 0:
            direction = 0
        if direction == 0:
            x += self.DONKEY_SPEED
        else:
            x -= self.DONKEY_SPEED
        x = min(x,180)
        x = max(x,0)
        self.donkey.setdirection(direction)
        self.donkey.setPosition((x, y))
        self.donkey_group = pygame.sprite.RenderPlain(self.donkey)
    def getPlayerScore(self):
        """Return the player's current score."""
        return self.plr[0].getScore()
    def setPlayerScore(self, newscore):
        """Set the player's score to newscore."""
        self.plr[0].setScore(newscore)
    def getPlayerLives(self):
        """Return the player's remaining lives."""
        return self.plr[0].getLives()
    def checkwin(self):  # check if player reached destination
        """Return 1 if the player touches the castle near the top (y <= 35)."""
        x, y = self.plr[0].getPosition()
        if y <= 35:
            for b in self.castle_block_group.sprites():
                rect1 = self.plr[0].rect
                rect1.topleft = self.plr[0].getPosition()
                rect1.height = self.PLAYER_HEIGHT
                rect1.width = self.PLAYER_WIDTH
                rect2 = b.rect
                if rect1.colliderect(rect2):
                    return 1
        return 0
    def setPlayerstraight(self):  # Set player straight when not moving
        """Switch the player sprite to its idle (facing forward) image."""
        self.plr[0].setState(3)
    def respawnPlayer(self):  # Respawn player at left bottom
        """Clear all fireballs and move the player back to the spawn point."""
        self.killfireballs()
        self.plr[0].setPosition((0, self.PLAYER_SPAWN_LEVEL))
    def setplayerlives(self):
        """Reset the player's lives to 2."""
        self.plr[0].setLives(2)
    def killfireballs(self):  # Kill all fireballs
        """Remove every fireball from the board."""
        self.fireballs = []
        self.fireball_group = pygame.sprite.RenderPlain(*self.fireballs)
    def upgradeplayerlevel(self):
        """Advance the player to the next game level."""
        self.plr[0].upgradelevel()
    def getplayerlevel(self):
        """Return the player's current game level."""
        return self.plr[0].getlevel()
    def boostfireball(self):  # Increase speed of fireball
        """Permanently increase fireball speed (used on level-up)."""
        self.FIREBALL_SPEED += 2
| {
"repo_name": "adityavagarwal/DonkeyKong",
"path": "board.py",
"copies": "1",
"size": "19483",
"license": "mpl-2.0",
"hash": 7843700141116881000,
"line_mean": 42.6838565022,
"line_max": 120,
"alpha_frac": 0.5339013499,
"autogenerated": false,
"ratio": 3.5436522371771555,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45775535870771555,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Aditya Vikram Agarwal'
import pygame
import person
class Player(person.PersonSprite):
    """The playable character sprite.

    Extends PersonSprite with two extra images (idle and climbing), a score,
    a life counter and a game level.
    """
    def __init__(self, image_left, image_right, image_straight, image_climb, position, width, height, state, lives):
        """Load and scale the extra images and initialise score/lives/level."""
        person.PersonSprite.__init__(self, image_left, image_right, position, width, height, state)
        self.score = 0
        self.lives = lives
        size = (width, height)
        self.image_straight = pygame.transform.scale(pygame.image.load(image_straight), size)
        self.image_climb = pygame.transform.scale(pygame.image.load(image_climb), size)
        self.level = 1
    def setState(self, state):
        """Set movement state and swap the sprite image accordingly.

        0 = facing left, 1 = facing right, 2 = climbing, 3 = idle;
        any other value keeps the current image.
        """
        self.state = state
        lookup = {0: self.left, 1: self.right, 2: self.image_climb, 3: self.image_straight}
        if state in lookup:
            self.image = lookup[state]
    def setLives(self, lives):
        """Set the remaining life count."""
        self.lives = lives
    def setScore(self, newScore):
        """Set the current score."""
        self.score = newScore
    def getState(self):
        """Return the current movement state."""
        return self.state
    def getLives(self):
        """Return the remaining life count."""
        return self.lives
    def getScore(self):
        """Return the current score."""
        return self.score
    def upgradelevel(self):
        """Advance to the next game level."""
        self.level += 1
    def getlevel(self):
        """Return the current game level."""
        return self.level
| {
"repo_name": "adityavagarwal/DonkeyKong",
"path": "player.py",
"copies": "1",
"size": "1387",
"license": "mpl-2.0",
"hash": 5243465007668985000,
"line_mean": 27.8958333333,
"line_max": 116,
"alpha_frac": 0.6128334535,
"autogenerated": false,
"ratio": 3.6693121693121693,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9777843463914422,
"avg_score": 0.0008604317795494267,
"num_lines": 48
} |
__author__ = 'Aditya Vikram Agarwal'
import pygame
class ScoreBoard(pygame.sprite.Sprite):
    """HUD panel drawn at (500, 530): score, remaining lives and level."""
    def __init__(self, image, text, screen, life_image):
        """Load the panel and life-icon images and draw the initial score."""
        pygame.sprite.Sprite.__init__(self)
        typeface = pygame.font.SysFont("comicsansms", 70)
        rendered = typeface.render(str(text), True, (102, 51, 0))
        self.image = pygame.transform.scale(pygame.image.load(image), (200, 80))
        self.life_image = pygame.image.load(life_image)
        screen.blit(self.image, (500, 530))
        screen.blit(rendered, (500 + 75 - (rendered.get_width() / 2), 530 + 55 - (rendered.get_height() / 2)))
    def update(self, text, screen):
        """Redraw the panel background and render the current score."""
        typeface = pygame.font.SysFont("comicsansms", 70)
        rendered = typeface.render(str(text), True, (102, 51, 0))
        screen.blit(self.image, (500, 530))
        screen.blit(rendered, (500 + 50 - (rendered.get_width() / 2), 530 + 40 - (rendered.get_height() / 2)))
    def update_lives(self, screen, lives):
        """Draw one life icon per remaining life, left to right."""
        for slot in range(lives):
            self.life_image = pygame.transform.scale(self.life_image, (25, 50))
            screen.blit(self.life_image, (620 + slot * 35, 545))
    def update_level(self, level, screen):
        """Render the current level as a small golden number on the panel."""
        typeface = pygame.font.SysFont("comicsansms", 70)
        rendered = typeface.render(str(level), True, (218, 165, 32))
        rendered = pygame.transform.scale(rendered, (20, 30))
        screen.blit(rendered, (500 + 100 - (rendered.get_width() / 2), 530 + 20 - (rendered.get_height() / 2)))
| {
"repo_name": "adityavagarwal/DonkeyKong",
"path": "scoreboard.py",
"copies": "1",
"size": "1440",
"license": "mpl-2.0",
"hash": 9221288213720448000,
"line_mean": 45.4516129032,
"line_max": 99,
"alpha_frac": 0.5923611111,
"autogenerated": false,
"ratio": 3.0835117773019274,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4175872888401927,
"avg_score": null,
"num_lines": null
} |
__author__ = 'adiyoss'
import os
from optparse import OptionParser
from lib.textgrid import *
def create_text_grid(text_grid_path, label, num_of_frames):
    """Write a Praat TextGrid with one 'Duration' tier of three intervals.

    label -- (onset, offset) in frames; num_of_frames -- total frame count.
    Frames are converted to seconds at 10 ms per frame.
    """
    # should be different for every file format
    FRAME_RATE = 10
    MILLISEC_2_SEC = 0.001
    FRAME_CONCAT_ONSET = 0
    FRAME_CONCAT_OFFSET = 0
    onset_frames = (label[0] + FRAME_CONCAT_ONSET)
    offset_frames = (label[1] + FRAME_CONCAT_OFFSET)
    total_duration = num_of_frames*FRAME_RATE*MILLISEC_2_SEC
    # build TextGrid: silence / vowel / silence
    grid = TextGrid()
    duration_tier = IntervalTier(name='Duration', minTime=0.0, maxTime=float(total_duration))
    duration_tier.addInterval(Interval(0, float(onset_frames) * FRAME_RATE * MILLISEC_2_SEC, ""))
    duration_tier.addInterval(Interval(float(onset_frames) * FRAME_RATE * MILLISEC_2_SEC, float(offset_frames) * FRAME_RATE * MILLISEC_2_SEC, ""))
    duration_tier.addInterval(Interval(float(offset_frames) * FRAME_RATE * MILLISEC_2_SEC, float(total_duration), ""))
    grid.append(duration_tier)
    grid.write(text_grid_path)
"repo_name": "adiyoss/DeepWDM",
"path": "front_end/lib/text_grid_creation.py",
"copies": "1",
"size": "1056",
"license": "mit",
"hash": 8182936792220168000,
"line_mean": 34.2333333333,
"line_max": 134,
"alpha_frac": 0.6799242424,
"autogenerated": false,
"ratio": 2.823529411764706,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8992038988583373,
"avg_score": 0.0022829331162664496,
"num_lines": 30
} |
__author__ = 'adiyoss'
import os
import sys
from optparse import OptionParser
import numpy as np
import matplotlib.pyplot as plt
NUM_OF_FEATURES = 20
graphIndex = 0
predict_line = 1
label_line = 1
phi_indicator = 0
def on_type(event):
    """Matplotlib key handler for the feature viewer.

    q quits; left/right cycle features; p/l toggle predicted/real marks;
    r/f/m select raw features / phi / segment means. Any recognised key
    closes the current figure so the main loop redraws it.
    """
    global graphIndex, predict_line, label_line, phi_indicator
    key = event.key
    if key == 'q':
        sys.exit(0)
    if key == 'right':
        graphIndex -= 1
    elif key == 'left':
        graphIndex += 1
    elif key == 'p':
        predict_line = 0 if predict_line == 1 else 1
    elif key == 'l':
        label_line = 0 if label_line == 1 else 1
    elif key == 'r':
        phi_indicator = 0
    elif key == 'f':
        phi_indicator = 1
    elif key == 'm':
        phi_indicator = 2
    else:
        return
    plt.close()
def display_features(filename, frame_begin_and_end_real, frame_begin_and_end_predict):
    """Interactively plot per-frame acoustic features with boundary marks.

    filename -- whitespace-delimited matrix, one row per frame, one column per
        feature (see feature_names below for the expected 20 columns).
    frame_begin_and_end_real -- "onset-offset" frame indices of the real label.
    frame_begin_and_end_predict -- same format for the predicted label.

    Loops forever, one feature per figure; keys are handled by on_type()
    ('q' exits the process).
    """
    if not os.path.isfile(filename):
        # NOTE(review): only a warning -- np.loadtxt below will still raise.
        sys.stderr.write("WARNING: file not found, " + str(filename))
    # compute the phi values
    labels = frame_begin_and_end_real.split('-')
    predict = frame_begin_and_end_predict.split('-')
    m = np.loadtxt(filename)
    phi_m = np.zeros(m.shape, dtype=np.float64)
    phi_mean = np.zeros(m.shape, dtype=np.float64)
    window_size = 10  # 25ms window
    # phi_m[i]: mean of the window after frame i minus mean of the window before it.
    for i in range(window_size, len(m) - window_size):
        pre_values = np.zeros(len(m[0, :]))
        post_values = np.zeros(len(m[0, :]))
        for j in range(window_size):
            pre_values += (m[i - window_size + j, :])
            post_values += (m[i + j, :])
        phi_m[i, :] = (post_values - pre_values) / window_size
    # phi_mean: per-feature mean of each segment (pre-onset, onset..offset,
    # post-offset) broadcast over every frame of that segment.
    tmp = np.zeros(len(m[0, :]))
    onset = int(labels[0])
    offset = int(labels[1])
    for i in range(0, onset):
        tmp += m[i, :]
    tmp /= onset
    for i in range(0, onset):
        phi_mean[i, :] = tmp
    tmp = np.zeros(len(m[0, :]))
    for i in range(onset, offset):
        tmp += m[i, :]
    # BUG FIX: the middle segment has (offset - onset) frames; it previously
    # divided by offset, under-scaling the segment mean.
    tmp /= (offset - onset)
    for i in range(onset, offset):
        phi_mean[i, :] = tmp
    tmp = np.zeros(len(m[0, :]))
    for i in range(offset, len(m)):
        tmp += m[i, :]
    tmp /= (len(m) - offset)
    for i in range(offset, len(m)):
        phi_mean[i, :] = tmp
    # the feature names for the title
    feature_names = ['Short Term Energy', 'Total Energy', 'Low Energy', 'High Energy', 'Wiener Entropy',
                     'Auto Correlation', 'Pitch',
                     'Voicing', 'Zero Crossing', 'Vowel', 'Nasal', 'Glide', 'Sil', 'Sum Vowels', 'Sum Nasals',
                     'Sum Glides',
                     'MFCC_1', 'MFCC_2', 'MFCC_3', 'MFCC_4']
    while True:
        index = graphIndex % len(feature_names)
        fig = plt.figure(1, figsize=(20, 10))
        fig.canvas.mpl_connect('key_press_event', on_type)
        fig.suptitle(feature_names[index], fontsize='x-large', style='italic', fontweight='bold')
        max_m = np.max(m[:, index])
        min_m = np.min(m[:, index])
        width = float(0.6)
        if phi_indicator == 0:
            plt.plot((m[:, index]), linestyle='-', linewidth=width, color='#006699')
        elif phi_indicator == 1:
            plt.plot((phi_m[:, index]), linestyle='-', linewidth=width, color='#006699')
        elif phi_indicator == 2:
            plt.plot((phi_mean[:, index]), linestyle='-', linewidth=width, color='#006699')
        legend_handles = []
        legend_names = []
        if label_line == 1:
            labels_plot, = plt.plot([labels[0], labels[0]], [min_m, max_m], linestyle='-', color="#730A0A", lw=2)
            plt.plot([labels[1], labels[1]], [min_m, max_m], linestyle='-', color="#730A0A", lw=2)
            legend_handles.append(labels_plot)
            legend_names.append("Real Label")
        if predict_line == 1:
            predict_plot, = plt.plot([predict[0], predict[0]], [min_m, max_m], linestyle=':', color='#335C09', lw=2)
            plt.plot([predict[1], predict[1]], [min_m, max_m], linestyle=':', color='#335C09', lw=2)
            legend_handles.append(predict_plot)
            legend_names.append("Predict Label")
        plt.xlim(xmin=0, xmax=len(m))
        # plot the legend
        plt.figtext(0.13, 0.05, 'Q: quit', style='italic')
        plt.figtext(0.2, 0.05, 'P: Enable/disable prediction marks', style='italic')
        plt.figtext(0.38, 0.05, 'L: Enable/disable real label marks', style='italic')
        plt.figtext(0.56, 0.05, 'F/R: Show/Hide the feature functions', style='italic')
        plt.figtext(0.13, 0.02, 'Left arrow: Next figure', style='italic')
        plt.figtext(0.38, 0.02, 'Right arrow: Previous figure', style='italic')
        # BUG FIX: previously referenced labels_plot/predict_plot even when the
        # 'l'/'p' toggles had hidden those lines, raising NameError.
        if legend_handles:
            l2 = plt.legend(legend_handles, legend_names)
            plt.gca().add_artist(l2)  # add as a separate artist to the axes
        plt.show()
# parse the parameters
# the first argument should be the labels file from the intellij
# the second argument should be the path to the directory in which the textGrid files are located
# #-------------MENU--------------#
parser = OptionParser()
parser.add_option("-f", "--file", dest="file", help="The name of the data file", metavar="FILE")
parser.add_option("-l", "--label", dest="label", help="The onset and offset of the vowel, Example: 100-138",
                  metavar="FILE")
parser.add_option("-p", "--predict", dest="predict", help="The predicted onset and offset of the vowel, same as before",
                  metavar="FILE")
(options, args) = parser.parse_args()
# validation: every option is mandatory
required = (options.file, options.label, options.predict)
if any(opt is None for opt in required):
    sys.stderr.write("Invalid number of arguments.")
else:
    # run the script
    display_features(*required)
"repo_name": "adiyoss/AutoVowelDuration",
"path": "visualization/Python/display_features_and_phi.py",
"copies": "1",
"size": "5523",
"license": "mit",
"hash": -8267305431936353000,
"line_mean": 37.0965517241,
"line_max": 120,
"alpha_frac": 0.577584646,
"autogenerated": false,
"ratio": 3.2816399286987523,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.934820346170057,
"avg_score": 0.002204222599636419,
"num_lines": 145
} |
__author__ = 'Administrator'
class ListNode:
    """Node of a singly linked list: a value and a pointer to the next node."""
    def __init__(self, x):
        self.val = x
        self.next = None
def merge_list(head1, head2):
    """Merge two sorted linked lists in place and return the merged head.

    Stable: on equal values the node from head1 comes first. Either head may
    be None. Nodes are relinked, not copied.
    """
    # Dummy head avoids special-casing the first appended node.
    fakehead = ListNode(0)
    lastnode = fakehead
    # Idiom fix: compare to None with `is not None`, not `!=`.
    while head1 is not None and head2 is not None:
        if head1.val <= head2.val:
            lastnode.next = head1
            lastnode = head1
            head1 = head1.next
        else:
            lastnode.next = head2
            lastnode = head2
            head2 = head2.next
    # At most one list still has nodes; append whatever remains.
    lastnode.next = head1 if head1 is not None else head2
    return fakehead.next
def print_list(head):
while head != None:
print head.val
head = head.next
array1 = [13]
array2 = [1, 12, 22, 25]
fakehead1 = ListNode(0)
lasthead1 = fakehead1
for a in array1:
lasthead1.next = ListNode(a)
lasthead1 = lasthead1.next
fakehead2 = ListNode(0)
lasthead2 = fakehead2
for a in array2:
lasthead2.next = ListNode(a)
lasthead2 = lasthead2.next
print_list(fakehead1.next)
print_list(fakehead2.next)
print '======================='
print_list(merge_list(fakehead1.next, fakehead2.next)) | {
"repo_name": "deepbluech/leetcode",
"path": "SwordOffer17_MergeList.py",
"copies": "1",
"size": "1123",
"license": "mit",
"hash": -8539987394498347000,
"line_mean": 23.4347826087,
"line_max": 54,
"alpha_frac": 0.5983971505,
"autogenerated": false,
"ratio": 3.1633802816901406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4261777432190141,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
class Solution:
    """Search in a rotated sorted array (no duplicates) in O(log n)."""
    # @param A, a list of integers
    # @param target, an integer to be searched
    # @return an integer
    def search(self, A, target):
        """Return the index of target in rotated sorted list A, or -1."""
        n = len(A)
        if n == 0:
            return -1
        if n == 1:
            return 0 if A[0] == target else -1
        # Not rotated: plain binary search over the whole array.
        if A[n-1] > A[0]:
            return self.binary_search(A, target, 0, n-1)
        # Index of the smallest element (start of the right segment).
        # BUG FIX: removed stray debug `print break_idx`.
        break_idx = self.find_break_index(A, 0, n-1)
        largest_val = A[break_idx - 1]
        smallest_val = A[break_idx]
        head_val = A[0]
        tail_val = A[n-1]
        # Target lies in the right (smaller-values) segment.
        if smallest_val <= target <= tail_val:
            return self.binary_search(A, target, break_idx, n-1)
        # Target lies in the left (larger-values) segment.
        if head_val <= target <= largest_val:
            return self.binary_search(A, target, 0, break_idx)
        return -1
    def find_break_index(self, A, start, end):
        """Return the index of the rotation point (smallest element) in A[start:end+1].

        Returns -1 for a single-element range. Assumes no duplicates.
        """
        if end - start == 1:
            return end
        if end - start == 0:
            return -1
        # Floor division keeps indices integral on Python 3 as well.
        mid = start + (end - start) // 2
        # Advance while the midpoint still belongs to the left (larger) run.
        while A[mid] > A[start]:
            start = mid
            mid = (mid + 1 + end) // 2
        return self.find_break_index(A, start, mid)
    def binary_search(self, A, target, start, end):
        """Standard recursive binary search in sorted A[start:end+1]; -1 if absent."""
        if start > end or start < 0 or end >= len(A):
            return -1
        mid = start + (end - start) // 2
        if A[mid] == target:
            return mid
        elif A[mid] < target:
            return self.binary_search(A, target, mid+1, end)
        else:
            return self.binary_search(A, target, start, mid-1)
if __name__ == '__main__':
s = Solution()
#no duplicate
A = [1,1,1,0,1]
print s.find_break_index(A, 0, len(A)-1) | {
"repo_name": "deepbluech/leetcode",
"path": "Search in Rotated Sorted Array.py",
"copies": "1",
"size": "1768",
"license": "mit",
"hash": -5065643421320573000,
"line_mean": 30.0350877193,
"line_max": 64,
"alpha_frac": 0.4949095023,
"autogenerated": false,
"ratio": 3.4263565891472867,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4421266091447287,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
# -*- coding:utf-8 -*-
#KissPython
import atm
import sys
# Interactive shopping cart: pick products by number, then (option 0) pay
# with an ATM card via the atm module. All user-facing text is Chinese.
product_list = [
    ("Iphone",4000),
    ("Computer",5000),
    ("Coffee",300),
    ("Fruit",200),
    ("Mousse",100)
]
shopping_list = []   # product names added so far (may contain duplicates)
count = 0            # running total price
while True:
    # Show the numbered product menu.
    index = 1
    for i in product_list:
        print index,i
        index += 1
    select = int(raw_input("请选择所要购买商品编号 <0为结算> :").strip())
    if select not in range(len(product_list) + 1):
        print "商品编号错误!\n"
        continue
    if select == 0:
        # Checkout: refuse if the cart is empty.
        if bool(shopping_list) == False:
            print "您还没有选购商品,无法进行结算!\n"
            continue
        print "购买商品信息: " #%s " % shopping_list
        # Print each distinct product with its quantity.
        for i in range(len(product_list)):
            if product_list[i][0] in shopping_list:
                print "%s: %s" % (product_list[i][0],shopping_list.count(product_list[i][0]))
        print "需要支付金额: %s" % count
        card = raw_input("请输入支付卡号: ").strip()
        passwd = raw_input("请输入密码: ").strip()
        code = atm.login(card,passwd)
        if code == 0:  # authenticated
            qr = raw_input("确认购买?[y/n] ")
            if qr == "y":
                # Deduct from balance and log the purchase via atm.record.
                list = atm.disk_load("user_disk.pkl")
                if list[card]["black"] >= count:
                    list[card]["black"] -= count
                    a = "卡消费 %s 元" % count
                    atm.record(card,"user_disk.pkl",a,list[card]["black"])
                    print "购买成功!"
                    sys.exit()
                else:
                    print "余额不足!"
                    sys.exit()
            else:
                sys.exit(0)
    # Add the chosen product (menu is 1-based, list is 0-based).
    select -= 1
    count += product_list[select][1]
    shopping_list.append(product_list[select][0])
| {
"repo_name": "kisspython/atm",
"path": "shopping.py",
"copies": "1",
"size": "1827",
"license": "apache-2.0",
"hash": -589453120312307500,
"line_mean": 29.4909090909,
"line_max": 93,
"alpha_frac": 0.4788312463,
"autogenerated": false,
"ratio": 2.9839857651245554,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39628170114245553,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
# -*- coding:utf-8 -*-
#KissPython
import pickle,time,md5,sys
def disk_load(disk_file):
    """Load and return the pickled object stored in disk_file.

    BUG FIX: the file is now opened in binary mode ('rb'); pickle requires a
    binary stream, and the previous default text-mode open only worked by
    accident under Python 2.
    """
    with open(disk_file, 'rb') as fr:
        return pickle.load(fr)
def disk_write(info, disk_file):
    """Serialize info into disk_file with pickle, replacing any previous content."""
    with open(disk_file, 'wb') as fw:
        pickle.dump(info, fw)
def deciphering_md5(password):
    """Return the hex MD5 digest of password.

    FIX: uses hashlib (available since Python 2.5) instead of the legacy
    `md5` module, which was removed in Python 3; the digest is identical
    for byte-string input.
    """
    import hashlib  # local import keeps the legacy module-level imports untouched
    return hashlib.md5(password).hexdigest()
def record(card, disk_file, action, balance):
    """Append a transaction entry for card and persist its new balance.

    card -- account key in the pickled user store at disk_file.
    action -- human-readable description of the transaction.
    balance -- the account balance AFTER the transaction; it is both stored
        on the account and recorded in the history entry.

    FIXES: replaces the Python-2-only `file()` builtin with `with open(...)`
    (the read handle was previously never closed) and drops the dead
    `add = ...append(...)` assignment (list.append returns None).
    """
    with open(disk_file, 'rb') as fr:
        info = pickle.load(fr)
    entry = {
        "time": time.strftime('%Y-%m-%d %H:%M:%S'),
        "action": action,
        "balance": balance,
    }
    info[card]["black"] = balance
    info[card]["record"].append(entry)
    with open(disk_file, 'wb') as fw:
        pickle.dump(info, fw)
def login(card,passwd):
    """Authenticate card/passwd against the pickled user store.

    Returns 0 on success, 1 for an unknown account, 3 if the account is
    already locked, 2 if this session's third wrong password locked it.
    Re-prompts interactively (raw_input) on a wrong password.
    """
    users = disk_load("user_disk.pkl")
    if card not in users:
        print "未知帐户 %s !\n已自动退出!" % card
        return 1
    if users[card]["locked"] == True:
        print "帐户 %s 已被锁定 !\n请联系工作人员!" % card
        return 3
    passerror = 0
    # Compare the MD5 of the entered password against the stored hash;
    # three failures lock the account and persist the lock.
    while deciphering_md5(passwd) != users[card]["password"]:
        passerror += 1
        if passerror == 3:
            print "帐户 %s 已被锁定" % card
            users[card]["locked"] = True
            disk_write(users,"user_disk.pkl")
            return 2
        passwd = raw_input(("密码错误 %s 次!\n请重新输入密码:") % passerror)
    return 0
def main():
    """Interactive ATM session: log in, then loop over a numbered menu.

    Menu options: 1 balance, 2 transaction history, 3 withdraw (5% fee),
    4 transfer out, 5 repay/deposit, 6 change password, 7 exit.
    All user-facing text is Chinese; state lives in user_disk.pkl.
    """
    card = raw_input("请输入银行卡号: ").strip()
    passwd = raw_input("请输入密码: ").strip()
    logincode = login(card,passwd)
    if logincode == 0:
        print '\n'
        list = ["查询余额","查看交易明细","取现","转账","还款","修改密码","退出"]
        input_error = 0
        while True:
            # Print the numbered menu.
            index = 1
            for item in list:
                #print index + "." + item
                print index,item
                index += 1
            select = raw_input("请选择需要办理的业务编号: ").strip()
            if bool(select) == True:
                if select == "7":
                    print "已退出...\n"
                    sys.exit(0)
                if select == "1":
                    # Balance inquiry.
                    users = disk_load("user_disk.pkl")
                    print "卡号 : %s" % card
                    print "卡内余额: %s 元\n" % users[card]["black"]
                elif select == "2":
                    # Transaction history.
                    users = disk_load("user_disk.pkl")
                    for i in users[card]["record"]:
                        print i["time"],"\t",i["action"],"\t"," 余额:",i["balance"],
                        #print ok
                    print '\n'
                elif select == "3":
                    # Withdrawal: a 5% fee is added to the requested amount.
                    users = disk_load("user_disk.pkl")
                    output = int(raw_input("请输入要取现金额数:" ).strip())
                    output *= 1.05
                    if output > users[card]["black"]:
                        print "余额不足! \n卡内余额: %s 元\n" % users[card]["black"]
                    else:
                        users[card]["black"] -= output
                        mess = "取现 %s 元" % output
                        record(card,"user_disk.pkl",mess,users[card]["black"])
                        print "取现成功 !\n"
                elif select == "4":
                    # Transfer out. NOTE(review): the destination card's
                    # balance is never credited -- confirm intended.
                    users = disk_load("user_disk.pkl")
                    out_card = raw_input("请输入转入银行卡号: ").strip()
                    out_me = int(raw_input("请输入转入金额: ").strip())
                    if out_me > users[card]["black"]:
                        print "账户余额不足!\n"
                    else:
                        users[card]["black"] -= out_me
                        mess = "转出 %s 元" % out_me
                        record(card,"user_disk.pkl",mess,users[card]["black"])
                        #print "转出金额: %s 到 %s 成功!\n " % (out_me,out_card)
                        print "转账成功! \n"
                elif select == "5":
                    # Repayment / deposit.
                    users = disk_load("user_disk.pkl")
                    in_count = int(raw_input("请输入还款金额数:"))
                    users[card]["black"] += in_count
                    mess = "还款 %s 元" % in_count
                    record(card,"user_disk.pkl",mess,users[card]["black"])
                    print "还款成功!\n"
                    print "卡内余额 %s \n" % users[card]["black"]
                elif select == "6":
                    # Change password (verify old, confirm new twice).
                    oldpass = raw_input("请输入当前密码:").strip()
                    firstpass = raw_input("请输入新密码:").strip()
                    secpass = raw_input("请再次输入密码:").strip()
                    if firstpass != secpass:
                        print "两次输入不匹配!\n"
                        continue
                    users = disk_load("user_disk.pkl")
                    #oldmd5pass = deciphering_md5(oldpass)
                    if deciphering_md5(oldpass) != users[card]["password"]:
                        print "\n当前密码不正确!\n"
                        continue
                    newmd5pass = md5.new(firstpass).hexdigest()
                    users[card]["password"] = newmd5pass
                    disk_write(users,"user_disk.pkl")
                    print "\n密码修改成功!\n"
                else:
                    # Unrecognised menu number: exit after 3 bad inputs.
                    if input_error >= 3:
                        print "已自动退出!\n"
                        sys.exit(2)
                    print "输入错误!\n"
                    input_error += 1
            else:
                # Empty input: just show the menu again.
                pass
# Script entry point: prompt for card credentials and start the ATM menu loop.
if __name__ == "__main__":
    main()
| {
"repo_name": "kisspython/atm",
"path": "atm.py",
"copies": "1",
"size": "5660",
"license": "apache-2.0",
"hash": 8397637703270422000,
"line_mean": 35.4055944056,
"line_max": 82,
"alpha_frac": 0.432961967,
"autogenerated": false,
"ratio": 3.3307741522712733,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4263736119271273,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
#coding:utf-8
from parseIni import *
from parseIniFile import *
def test0():
    """Read one value from 0.ini through the file-object based getValue API."""
    # Plain style:
    # f = open("0.ini","rb")
    # print getValue(f,"global","port");
    # f.close()
    # `with` manages close/try-catch automatically (introduced in Python 2.5
    # via `from __future__ import with_statement`, default since 2.6).
    with open("0.ini","rb") as f:
        print getValue(f,"global","port");
def test1():
    """Parse 1.ini into a nested {platform: {section: {key: value}}} dict."""
    # Plain style:
    # f = open("1.ini","rb")
    # strFileContent = f.read()
    # f.close()
    # `with` manages close/try-catch automatically (Python 2.5+ via __future__,
    # default since 2.6).
    with open("1.ini","rb") as f:
        strFileContent = f.read()
    vardict = {}
    # Level 1: split the file into <part> blocks.
    var1 = getPlatformMap(strFileContent)
    for k,v in var1.items():
        # Level 2: split each part into [section] bodies.
        var2 = getSectionMap(v)
        dict3 = {}
        for k2,v2 in var2.items():
            # Level 3: key=value pairs within a section.
            var3 = getValueMap(v2)
            dict3[k2] = var3
        vardict[k] = dict3
    print vardict["part2"]["global"]["ip"]
test0();
test1(); | {
"repo_name": "xiaomogui/lulu",
"path": "PythonObject/ppyconf/start.py",
"copies": "2",
"size": "1154",
"license": "apache-2.0",
"hash": -6926260300824765000,
"line_mean": 24.675,
"line_max": 121,
"alpha_frac": 0.5838206628,
"autogenerated": false,
"ratio": 2.425531914893617,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4009352577693617,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
#coding=utf-8
import binascii
from parseData.ParseAMF0 import *
from linkAMQ.ConnectAMQ import *
def send_to_amq(data):
    """Hex-encode `data`, parse it as AMF0 and publish the result to ActiveMQ
    through the shared singleton connection.  Returns 0."""
    print "\nin python function [send_to_amq] : "
    # Get the byte array of the string (debug leftovers)
    #bytes = bytearray(data)
    #for byte in bytes:
        #print byte;
    hex_data = binascii.b2a_hex(data)
    # Space-separated byte pairs -- computed for debugging only, unused below.
    hex_data_in_space = " ".join([x+y for x,y in zip(hex_data[::2], hex_data[1::2])])
    # print data
    # print hex_data
    # print hex_data_in_space
    praseAmf0 = ParseAMF0()
    pdata = praseAmf0.prase(hex_data)
    connectAMQ = ConnectAMQSinleton.getInstance()
    connectAMQ.send(pdata)
    return 0
def connect_amq(hostname='192.168.1.166', port=61613):
    """Open the shared singleton connection to the given ActiveMQ broker."""
    ConnectAMQSinleton.getInstance().connect(hostname, port)
def connect_send(data, hostname='192.168.1.166', port=61613):
print "\nin python function [connect_send] : "
hex_data = binascii.b2a_hex(data)
praseAmf0 = ParseAMF0()
pdata = praseAmf0.prase(hex_data)
connectAMQ = ConnectAMQ()
connectAMQ.connect(hostname, 61613)
connectAMQ.send(pdata)
connectAMQ.disConnect()
print "send ok!"
return 0
#connect_amq()
#send_to_amq("hello world")
#connect_send("hi") | {
"repo_name": "xiaomogui/lulu",
"path": "PythonObject/ProcessPcapData/processPcapData.py",
"copies": "2",
"size": "1255",
"license": "apache-2.0",
"hash": 3857742008065134000,
"line_mean": 23.1960784314,
"line_max": 85,
"alpha_frac": 0.6666666667,
"autogenerated": false,
"ratio": 2.8086560364464694,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44753227031464693,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
#coding=utf-8
import threading
from stompy.simple import Client
# Singleton pattern
class ConnectAMQSinleton(object):
    """Thread-safe singleton wrapper around a stompy Client connection."""
    __instance = None
    __lock = threading.Lock()  # guards singleton creation
    __stompClient = None
    # Default queue subscription.
    __subscribe = '/queue/amf_pcap_data'
    def __init__(self):
        'disable the __init__ method'
    def __new__(cls):
        return object.__new__(cls)
    @staticmethod
    def getInstance():
        """Return the process-wide singleton, creating it on first use.

        BUG FIX: the original used a non-blocking acquire(0) and released the
        lock unconditionally, so a thread that lost the race could both skip
        waiting for the critical section and release a lock it never held
        (raising RuntimeError).  A blocking `with` block makes the
        check-and-create step atomic; the outer unlocked check keeps the fast
        path cheap.
        """
        if not ConnectAMQSinleton.__instance:
            with ConnectAMQSinleton.__lock:
                if not ConnectAMQSinleton.__instance:
                    ConnectAMQSinleton.__instance = object.__new__(ConnectAMQSinleton)
                    object.__init__(ConnectAMQSinleton.__instance)
        return ConnectAMQSinleton.__instance
    def connect(self, hostname='192.168.1.166', port=61613):
        # Connect to the JMS broker via the simple stomp client.
        # (ActiveMQ's stomp port defaults to 61613, not the JMS port 61616.)
        ConnectAMQSinleton.__stompClient = Client(hostname, port)
        #stomp = Client()  # defaults hostname="localhost", port=61613 for a local broker
        ConnectAMQSinleton.__stompClient.connect()
    def disConnect(self):
        if ConnectAMQSinleton.__stompClient != None:
            # Unsubscribe, then close the connection.
            ConnectAMQSinleton.__stompClient.unsubscribe(ConnectAMQSinleton.__subscribe)
            ConnectAMQSinleton.__stompClient.disconnect()
    def send(self, data):
        # Publish to the configured queue.
        ConnectAMQSinleton.__stompClient.put(data, destination=ConnectAMQSinleton.__subscribe)
    def receive(self):
        # ack="client": manual acknowledgement (the default "auto" was
        # problematic per the original author's note).
        ConnectAMQSinleton.__stompClient.subscribe(ConnectAMQSinleton.__subscribe,ack="client")
        # Block until the broker pushes a message.
        message = ConnectAMQSinleton.__stompClient.get()
        # Message payload
        receiveData = message.body
        ConnectAMQSinleton.__stompClient.ack(message)
        return receiveData
    # getter
    def getSubscribe(self):
        return ConnectAMQSinleton.__subscribe
    # setter
    def setSubscribe(self, subscribe):
        ConnectAMQSinleton.__subscribe = subscribe
class ConnectAMQ:
    """Non-singleton stomp client wrapper; connection state is kept on the
    class, mirroring ConnectAMQSinleton's interface."""
    __stompClient = None
    # Default queue subscription.
    __subscribe = '/queue/amf_pcap_data'
    def __init__(self):
        'init'
    def connect(self, hostname='192.168.1.166', port=61613):
        # Connect to the JMS broker with the simple stomp client.
        # (ActiveMQ listens for stomp on 61613, not the JMS port 61616.)
        client = Client(hostname, port)
        ConnectAMQ.__stompClient = client
        #stomp = Client()  # defaults hostname="localhost", port=61613 for a local broker
        client.connect()
    def disConnect(self):
        # Only tear down when a connection was ever established.
        if ConnectAMQ.__stompClient != None:
            ConnectAMQ.__stompClient.unsubscribe(ConnectAMQ.__subscribe)
            ConnectAMQ.__stompClient.disconnect()
    def send(self, data):
        # Publish to the configured queue.
        ConnectAMQ.__stompClient.put(data, destination=ConnectAMQ.__subscribe)
    def receive(self):
        # ack="client": manual acknowledgement (default "auto" was problematic).
        ConnectAMQ.__stompClient.subscribe(ConnectAMQ.__subscribe,ack="client")
        # Block until a message arrives, acknowledge it, hand back the payload.
        msg = ConnectAMQ.__stompClient.get()
        body = msg.body
        ConnectAMQ.__stompClient.ack(msg)
        return body
    # accessor
    def getSubscribe(self):
        return ConnectAMQ.__subscribe
    # mutator
    def setSubscribe(self, subscribe):
        ConnectAMQ.__subscribe = subscribe
"repo_name": "xiaomogui/lulu",
"path": "lulu/lulu/python/linkAMQ/ConnectAMQ.py",
"copies": "2",
"size": "4337",
"license": "apache-2.0",
"hash": -5810222889464626000,
"line_mean": 31.9298245614,
"line_max": 98,
"alpha_frac": 0.654676259,
"autogenerated": false,
"ratio": 2.727470930232558,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4382147189232558,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
# -*- coding:utf-8 -*-
#KissPython
import SocketServer,os,time
from db import *
class MyServer(SocketServer.BaseRequestHandler):
    """Per-connection FTP-like handler supporting list / put / get commands.

    Wire protocol: '|'-separated fields, e.g. "put|name|size|user".
    NOTE(review): every SQL statement is built by string formatting from
    client-supplied data -- SQL injection risk; should use parameterized
    queries.
    """
    def setup(self):
        pass
    def handle(self):
        while True: # one command per iteration until the client disconnects
            print self.request,self.client_address,self.server
            #print "IP: %s Port: %s 连接成功." % self.client_address
            # Read one '|'-separated command line.
            data = self.request.recv(1024)
            data = data.split('|')
            if data[0] == "list":
                # "list|user": return a directory listing of the user's home.
                cur = db.cursor()
                sql = ("select home from user where username = '%s';" % data[1])
                cur.execute(sql)
                pathsql = cur.fetchone()
                if os.path.isdir(pathsql[0]):
                    print "用户: %s 获取家目录: %s 成功." % (data[1],pathsql[0])
                    self.request.send(str(os.listdir(pathsql[0])))
                else:
                    print "用户: %s 获取家目录: %s 未找到." % (data[1],pathsql[0])
                    self.request.send("家目录: %s 未找到." % (pathsql[0]))
            if data[0] == "put":
                # "put|filename|filesize|user": receive a file upload.
                if len(data) < 3:
                    self.request.send("参数错误.")
                    continue
                cur = db.cursor()
                sql = ("select home from user where username = '%s';" % data[3])
                cur.execute(sql)
                pathsql = cur.fetchone()
                filename_from_cli,filesize = data[1],int(data[2])
                f = file('%s/%s' % (pathsql[0],filename_from_cli),'wb')
                recv_size = 0
                flag = True
                # Receive filesize bytes in 4KB chunks.
                # NOTE(review): recv() may return fewer bytes than requested,
                # so recv_size can overcount -- TODO confirm against real loads.
                while flag:
                    if recv_size + 4096 > filesize:
                        recv_data = self.request.recv(filesize - recv_size)
                        flag = False
                    else:
                        recv_data = self.request.recv(4096)
                        recv_size += 4096
                    f.write(recv_data)
                f.close()
                print "用户: %s\t上传文件:\t%s 成功." % (data[3],filename_from_cli)
                # Log the upload in the task table.
                cur = db.cursor()
                stime = time.strftime('%Y-%m-%d %H:%M:%S')
                sql2 = ('insert into task(username,stime,task,filename) values ("%s", "%s", "%s", "%s")' \
                       % (data[3],stime,"Upload",filename_from_cli))
                cur.execute(sql2)
                db.commit()
                self.request.send("文件: %s 上传成功." % filename_from_cli)
            if data[0] == "get":
                # "get|filename|user": stream a file back to the client.
                cur = db.cursor()
                sql = ("select home from user where username = '%s';" % data[2])
                cur.execute(sql)
                pathsql = cur.fetchone()
                if os.path.isfile('%s/%s' % (pathsql[0],data[1])):
                    filepath = "%s/%s" % (pathsql[0],data[1])
                    f = file(filepath,'rb')
                    file_size = os.stat(filepath).st_size
                    # First reply: "True|<size>" so the client knows how much to read.
                    self.request.send(str(True)+"|"+str(file_size))
                    send_size = 0
                    flag = True
                    # Send the file in 4KB chunks.
                    while flag:
                        if send_size + 4096 > file_size:
                            send_data = f.read()
                            flag = False
                        else:
                            send_data = f.read(4096)
                            send_size += 4096
                        self.request.send(send_data)
                    f.close()
                    # Log the download in the task table.
                    cur = db.cursor()
                    stime = time.strftime('%Y-%m-%d %H:%M:%S')
                    sql3 = ('insert into task(username,stime,task,filename) values ("%s", "%s", "%s", "%s")' \
                           % (data[2],stime,"Download",data[1]))
                    cur.execute(sql3)
                    db.commit()
                    print "用户: %s\t下载文件:\t%s 成功." % (data[2],data[1])
                    self.request.send("文件: %s 下载成功." % data[1])
                else:
                    self.request.send(str(False))
    def finish(self):
        pass
# Serve each client on its own thread, listening on localhost:21.
if __name__ == '__main__':
    server = SocketServer.ThreadingTCPServer(('127.0.0.1',21),MyServer)
    server.serve_forever()
| {
"repo_name": "kisspython/socket-ftp",
"path": "server.py",
"copies": "1",
"size": "4232",
"license": "apache-2.0",
"hash": 5036930409563591000,
"line_mean": 40.9387755102,
"line_max": 110,
"alpha_frac": 0.4240875912,
"autogenerated": false,
"ratio": 3.791512915129151,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9672977493125339,
"avg_score": 0.008524602640762486,
"num_lines": 98
} |
__author__ = 'Administrator'
# -*- coding:utf-8 -*-
#KissPython
import sys,hashlib,socket,os
from db import *
ip_port = ('127.0.0.1',21)
sk = socket.socket()
def info(username):
sk.connect(ip_port)
while True:
input = raw_input("ftp> ").strip()
input = input.split()
if input[0] == 'exit':
sk.close()
db.close()
sys.exit(0)
if len(input) == 0:
continue
if input[0] == "record":
cur = db.cursor()
sql = ("select stime,task,filename from task where username = '%s'" % username)
cur.execute(sql)
pwd = cur.fetchall()
if pwd:
for i in pwd:
print i[0]," ",i[1]," ",i[2]
else:
print "记录为空."
if input[0] == "list":
sk.send("list"+'|'+username)
data = sk.recv(1024)
print data
if input != "list" and input != "record":
if len(input) < 1:
print "输入错误"
continue
if input[0] == "put":
filepath = input[1]
if os.path.isfile(filepath):
filename,filesize = os.path.basename(filepath),os.stat(filepath).st_size
send_size = 0
flag = True
f = file(filepath,'rb')
sk.send("put"+"|"+filename+"|"+str(filesize)+"|"+username)
while flag:
if send_size + 4096 > filesize:
send_data = f.read()
flag = False
else:
send_data = f.read(4096)
send_size += 4096
sk.send(send_data)
f.close()
print sk.recv(1024)
else:
print "文件: %s 未找到" % filepath
continue
if input[0] == "get":
getfile = input[1]
sk.send("get"+"|"+getfile+"|"+username)
cord = sk.recv(1024)
cord = cord.split('|')
if cord[0] == "False":
print "文件未找到"
else:
filesize = int(cord[1])
f = file(getfile,'wb')
flag = True
recv_size = 0
while flag:
if recv_size + 4096 > recv_size:
recv_data = sk.recv(filesize - recv_size)
flag = False
else:
recv_data = sk.recv(4096)
recv_size += 4096
f.write(recv_data)
f.close()
print sk.recv(1024)
def login(username,password):
    """Verify credentials against the user table; on success enter the
    interactive shell, on a locked account offer unlock for super users.

    NOTE(review): string-formatted SQL -- injection risk; should be
    parameterized.
    """
    cur = db.cursor()
    sql = ("select passwd,super,flag from user where username = '%s'" % username)
    cur.execute(sql)
    # pwd = (md5_password_hex, super_flag, locked_flag) or None when unknown.
    pwd = cur.fetchone()
    if pwd:
        if pwd[0] == hashlib.md5(password).hexdigest():
            if pwd[2] == 1:
                # Account is locked: a super user may unlock it.
                print "用户: %s 已被锁定." % username
                choice = raw_input("是否进行解锁: [y/n]").strip()
                if choice == "n":
                    sys.exit(0)
                else:
                    if pwd[1] == 1:
                        # unlock() is defined elsewhere; presumably clears the
                        # locked flag -- TODO confirm.
                        unlock(username)
                        print "用户: %s 解锁成功.\n重新登陆\n" % username
                    else:
                        print "权限不足."
            else:
                # Credentials OK and account active: start the shell
                # (info() loops until the user exits).
                print "\n登陆成功.\n"
                info(username)
        else:
            print "密码错误"
            sys.exit(2)
    else:
        print "用户: %s 不存在." % username
        sys.exit(2)
    cur.close()
    db.close()
# Entry point: prompt for credentials and log in.
if __name__ == "__main__":
    username = raw_input("请输入用户名: ").strip()
    password = raw_input("请输入密码: ").strip()
    login(username,password)
    #login('kisspython','kisspython')
    #info()
| {
"repo_name": "kisspython/socket-ftp",
"path": "client.py",
"copies": "1",
"size": "4153",
"license": "apache-2.0",
"hash": 7094436220044958000,
"line_mean": 29.6641221374,
"line_max": 92,
"alpha_frac": 0.3960667165,
"autogenerated": false,
"ratio": 4.000996015936255,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4897062732436255,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
# -*- coding:utf-8 -*-
#KissPython
import MySQLdb
# ftp测试用户名和密码均为kisspython
db = MySQLdb.connect(
host = '172.16.13.113',
user = 'kisspython',
passwd = 'kisspython',
db = 'kisspython',
port = 3306
)
# Initialize the database
def flushdb():
    """Create the user and task tables and seed two test accounts.

    NOTE(review): the whole script is handed to a single cursor.execute();
    MySQLdb normally runs one statement per call -- confirm multi-statement
    execution is enabled on the target connection.
    """
    cur = db.cursor()
    sql = """
    create table user (
    id bigint UNSIGNED auto_increment COMMENT 'primary key',
    username VARCHAR(32) NOT NULL COMMENT 'username',
    passwd CHAR(32) NOT NULL COMMENT 'password ',
    super TINYINT NOT NULL DEFAULT 0 COMMENT 'super status 0 is not super user 1 is super user',
    flag TINYINT NOT NULL DEFAULT 0 COMMENT 'user status 0 is active, 1 is locked',
    home VARCHAR(100) NOT NULL COMMENT 'home',
    PRIMARY KEY (id),
    UNIQUE INDEX unq_username (username)
    ) COMMENT 'user table ' engine=innodb;
    create table task(
    username VARCHAR(32) NOT NULL COMMENT 'username',
    stime VARCHAR(32) NOT NULL COMMENT 'time',
    task VARCHAR(32) NOT NULL COMMENT 'task',
    filename VARCHAR(32) NOT NULL COMMENT 'filename'
    );
    insert into user(username,passwd,super,flag,home) \
    values('kisspython','27a7bc57330006339dd0536226e8077a',1,0,'D:\\\kisspython');
    insert into user(username,passwd,super,flag,home) \
    values('user1','27a7bc57330006339dd0536226e8077a',1,0,'D:\\\user1')
    """
    # Commit on success, roll back on any MySQL error; the connection is
    # closed in both paths.
    try:
        cur.execute(sql)
        cur.close()
        db.commit()
        db.close()
        print "Flush DB Sucessed."
    except MySQLdb.Error,e:
        print "Error: %d:%s" % (e.args[0], e.args[1])
        db.rollback()
        db.close()
# Run directly to (re)create the schema and seed data.
if __name__ == "__main__":
    flushdb()
| {
"repo_name": "kisspython/socket-ftp",
"path": "db.py",
"copies": "1",
"size": "1673",
"license": "apache-2.0",
"hash": -8632360628358502000,
"line_mean": 25.9016393443,
"line_max": 96,
"alpha_frac": 0.6337599025,
"autogenerated": false,
"ratio": 3.2303149606299213,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43640748631299214,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
#coding=utf-8
partLable = ("<",">")
sectionLable = ("[","]")
endlineLable = "\r\n"# line terminator on Windows
#endlineLable = "\n"# line terminator on Linux
equalLable = "="# assignment marker
noteLable = '#'# comment marker
# Build the top-level map: platform name -> raw body (from the first section
# bracket onwards).
def getPlatformMap(strtmp,lable1 = partLable,lable2 = sectionLable):
    platdict = {}
    for chunk in strtmp.split(lable1[0]):
        # keep only fragments that are non-trivial and contain a closing marker
        if len(chunk) <= 1 or chunk.rfind(lable1[1]) <= 0:
            continue
        name = chunk[:chunk.find(lable1[1])]
        platdict[name] = chunk[chunk.find(lable2[0]):]
    return platdict
# Split one platform body into {section name: section body}.
def getSectionMap(strtmp,lable1 = sectionLable):
    sectionDict = {}
    for piece in strtmp.split(lable1[0]):
        # keep only fragments that are non-trivial and contain a closing bracket
        if len(piece) <= 1 or piece.rfind(lable1[1]) <= 0:
            continue
        name = piece[:piece.find(lable1[1])]
        # the section body starts right after the first line break
        sectionDict[name] = piece[piece.find(endlineLable) + len(endlineLable):]
    return sectionDict
# Parse one section body into {key: value}.
def getValueMap(strtmp):
    """Parse "key=value" lines (separated by endlineLable) into a dict.

    Comments introduced by noteLable are stripped, all whitespace inside a
    line is removed, and the line is split on the first equalLable.
    """
    valueDict = {}
    for elem in strtmp.split(endlineLable):
        if len(elem) <= 1:
            continue
        # BUG FIX: use >= 0 so a comment starting in column 0 is stripped too
        # (the original's `> 0` let whole-line comments through as bogus keys).
        pos = elem.find(noteLable)
        if pos >= 0:
            elem = elem[0:pos:]
        elem = ''.join(elem.split())  # drop all whitespace
        eq = elem.find(equalLable)
        # ROBUSTNESS: skip lines with no '=' instead of emitting garbage entries.
        if eq < 0:
            continue
        valueDict[elem[0:eq:]] = elem[eq+len(equalLable)::]
    return valueDict
| {
"repo_name": "xiaomogui/lulu",
"path": "PythonObject/ppyconf/parseIni.py",
"copies": "2",
"size": "1600",
"license": "apache-2.0",
"hash": 5308351640195095000,
"line_mean": 30.7872340426,
"line_max": 68,
"alpha_frac": 0.6151271754,
"autogenerated": false,
"ratio": 2.58477508650519,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9139400369336428,
"avg_score": 0.012100378513752374,
"num_lines": 47
} |
__author__ = 'Administrator'
from config import *
class Runner(pygame.sprite.Sprite):
    """The player sprite: jumping physics, run-cycle animation and power-up
    bookkeeping."""
    def __init__(self, screen):
        self.screen = screen
        pygame.sprite.Sprite.__init__(self)
        self.image = image1
        self.rect = self.image.get_rect()
        self.rect.x = width/2
        self.rect.y = height - 84
        self.surface = self.screen.surface
        self.velocity = 5          # vertical velocity in px/frame (+ is down)
        self.gravity = 1           # per-frame downward acceleration
        self.jump_height = -20     # jump impulse (negative = upward)
        self.state = 'standing'    # 'standing' | 'jumping' | 'falling'
        self.power_up = 'none'
        self.power_up_timer = 0
        self.can_jump = True
        self.sprite_interval = 0   # animation counter, wraps at 110
    def key_event(self, event):
        # Space starts a jump, but only when the runner is grounded.
        if event.key == pygame.K_SPACE:
            if self.can_jump:
                jump_sound.play()
                self.state = 'jumping'
                self.can_jump = False
                self.velocity = self.jump_height
    def move(self):
        # Apply velocity, then gravity unless standing on ground.
        self.rect.y += self.velocity
        if self.state == 'standing':
            self.velocity = 0
            self.can_jump = True
        if not pygame.sprite.spritecollideany(self, self.screen.formation.ground) or self.state == 'jumping' or self.state == 'falling':
            self.velocity += self.gravity
            # Once moving downward, the runner counts as falling.
            if self.velocity > 0:
                self.state = 'falling'
    def draw(self):
        self.surface.blit(self.image, self.rect)
    def update(self):
        self.sprite_interval += 1
        self.move()
        if self.state == 'jumping':
            self.image = image1
        # Run-cycle animation: one frame (image2..image11) per 11-tick slice.
        if self.state == 'standing':
            if self.sprite_interval <= 11:
                self.image = image2
            elif self.sprite_interval <= 22:
                self.image = image3
            elif self.sprite_interval <= 33:
                self.image = image4
            elif self.sprite_interval <= 44:
                self.image = image5
            elif self.sprite_interval <= 55:
                self.image = image6
            elif self.sprite_interval <= 66:
                self.image = image7
            elif self.sprite_interval <= 77:
                self.image = image8
            elif self.sprite_interval <= 88:
                self.image = image9
            elif self.sprite_interval <= 99:
                self.image = image10
            elif self.sprite_interval <= 110:
                self.image = image11
        if self.sprite_interval >= 110:
            self.sprite_interval = 0
        # Count down an active power-up; expire when the timer hits 0.
        if self.power_up != 'none':
            self.power_up_timer -= 1
            if self.power_up_timer == 0:
                self.power_up = 'none'
class Spikes(pygame.sprite.Sprite):
    """A spike tile that scrolls left with the screen and despawns off-screen."""
    def __init__(self, screen, x, y):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        art = pygame.image.load("spikes.PNG").convert_alpha()
        self.image = pygame.transform.scale(art, (int(35), int(45)))
        self.rect = self.image.get_rect()
        self.rect.x = x
        self.rect.y = y
    def move(self):
        # screen.speed is negative, so this scrolls the sprite leftwards.
        self.rect.x += self.screen.speed
    def update(self):
        self.move()
        if self.rect.right <= 0:
            self.kill()
class Coin(pygame.sprite.Sprite):
    """A collectible coin that scrolls left and despawns off-screen."""
    def __init__(self, screen, x, y):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        art = pygame.image.load("coin.PNG").convert_alpha()
        self.image = pygame.transform.scale(art, (int(35), int(35)))
        self.rect = self.image.get_rect()
        self.rect.x = x
        self.rect.y = y
    def move(self):
        # screen.speed is negative, so this scrolls the sprite leftwards.
        self.rect.x += self.screen.speed
    def update(self):
        self.move()
        if self.rect.right <= 0:
            self.kill()
class Ground(pygame.sprite.Sprite):
    """A solid tile; bottom-row tiles recycle themselves so the floor never ends."""
    def __init__(self, screen, x, y):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        self.image = pygame.image.load("ground.PNG").convert()
        self.rect = self.image.get_rect()
        self.rect.x = x
        self.rect.y = y
    def move(self):
        # screen.speed is negative, so this scrolls the tile leftwards.
        self.rect.x += self.screen.speed
    def update(self):
        self.move()
        if self.rect.right > 0:
            return
        self.kill()
        # Floor tiles (bottom row) respawn at the right edge; platforms vanish.
        if self.rect.bottom == height:
            self.screen.formation.ground.add(Ground(self.screen, width, height-35))
class PowerUp(pygame.sprite.Sprite):
    """A floating pickup: type 1 = invincibility art, type 2 = coin-heaven art."""
    def __init__(self, screen, type):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        self.type = type
        # Choose sprite art by power-up kind.
        if self.type == 1:
            art = pygame.image.load("invincibility.png").convert_alpha()
            self.image = art
        elif self.type == 2:
            art = pygame.image.load("coin_heaven.png").convert_alpha()
            self.image = art
        self.rect = self.image.get_rect()
        self.rect.x = width
        self.rect.y = random.randint(0, height-100)
    def move(self):
        # screen.speed is negative, so this scrolls the pickup leftwards.
        self.rect.x += self.screen.speed
    def update(self):
        self.move()
        if self.rect.right <= 0:
            self.kill()
class Background(pygame.sprite.Sprite):
    """Full-screen backdrop tile; queues a fresh copy for endless scrolling."""
    def __init__(self, screen, x):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        art = pygame.image.load("background.PNG").convert()
        self.image = pygame.transform.scale(art, (int(width), int(height)))
        self.rect = self.image.get_rect()
        self.rect.x = x
        self.rect.y = 0
    def move(self):
        # screen.speed is negative, so the backdrop scrolls leftwards.
        self.rect.x += self.screen.speed
    def update(self):
        self.move()
        if self.rect.right > 0:
            return
        # Fully off-screen: retire this tile and spawn one at the right edge.
        self.kill()
        self.screen.background.add(Background(self.screen, width))
class Formation():
    """Owns all scrolling level geometry (ground, spikes, coins, power-ups)
    and periodically spawns one of four hand-built obstacle layouts.
    Tile size is 35 px throughout."""
    def __init__(self, screen):
        self.screen = screen
        self.x = width                    # spawn column (right screen edge)
        self.coins = pygame.sprite.Group()
        self.spikes = pygame.sprite.Group()
        self.ground = pygame.sprite.Group()
        self.power_ups = pygame.sprite.Group()
        self.counter = 0                  # distance scrolled since last layout
        self.length = 1000                # pixel width of the current layout
    def choose_formation(self):
        # Pick one of four fixed layouts at random.
        formation = random.randint(1, 4)
        if formation == 1:
            # Spike floor, three platforms at varied heights, vertical coin trail.
            self.length = 900
            a = 0
            for i in range(0, self.length, 35):
                spike = Spikes(self.screen, self.x+i, height-80)
                self.spikes.add(spike)
            for i in range(0, 175, 35):
                platform = Ground(self.screen, self.x+i, 400)
                self.ground.add(platform)
            for i in range(245, 420, 35):
                platform = Ground(self.screen, self.x+i, 200)
                self.ground.add(platform)
            for i in range(595, 770, 35):
                platform = Ground(self.screen, self.x+i, 450)
                self.ground.add(platform)
            for i in range(35, 450, 35):
                # Diagonal coin trail: x drifts right by 5 px per coin.
                coin = Coin(self.screen, self.x+630+a, i)
                self.coins.add(coin)
                a += 5
        if formation == 2:
            # Spike floor with three evenly spaced platforms and two coin rows.
            self.length = 900
            for i in range(0, 900, 35):
                spike = Spikes(self.screen, self.x+i, height-80)
                self.spikes.add(spike)
            for i in range(0, 175, 35):
                platform = Ground(self.screen, self.x+i, 400)
                self.ground.add(platform)
            for i in range(350, 525, 35):
                platform = Ground(self.screen, self.x+i, 400)
                self.ground.add(platform)
            for i in range(700, 875, 35):
                platform = Ground(self.screen, self.x+i, 400)
                self.ground.add(platform)
            for i in range(140, 385, 35):
                coin = Coin(self.screen, self.x+i, 200)
                coin2 = Coin(self.screen, self.x+350+i, 200)
                self.coins.add(coin)
                self.coins.add(coin2)
        if formation == 3:
            # Three platforms, two of them with spike rows on top/below.
            self.length = 875
            for i in range(0, 210, 35):
                spike = Spikes(self.screen, self.x+i, height-80)
                self.spikes.add(spike)
                spike2 = Spikes(self.screen, self.x+i+350, 355)
                self.spikes.add(spike2)
                spike3 = Spikes(self.screen, self.x+i+665, height-80)
                self.spikes.add(spike3)
            for i in range(0, 210, 35):
                platform = Ground(self.screen, self.x+i, 400)
                platform2 = Ground(self.screen, self.x+i+350, 400)
                platform3 = Ground(self.screen, self.x+i+665, 400)
                self.ground.add(platform)
                self.ground.add(platform2)
                self.ground.add(platform3)
        if formation == 4:
            # One long platform with three spike clusters, coins above each.
            self.length = 900
            for i in range(0, self.length, 35):
                platform = Ground(self.screen, self.x+i, 400)
                self.ground.add(platform)
            for i in range(0, 105, 35):
                spike2 = Spikes(self.screen, self.x+i+175, 355)
                coin = Coin(self.screen, self.x+i+175, 150)
                self.spikes.add(spike2)
                self.coins.add(coin)
                spike3 = Spikes(self.screen, self.x+i+525, 355)
                coin2 = Coin(self.screen, self.x+i+525, 150)
                self.coins.add(coin2)
                self.spikes.add(spike3)
                spike = Spikes(self.screen, self.x+i+795, 355)
                coin3 = Coin(self.screen, self.x+i+795, 150)
                self.spikes.add(spike)
                self.coins.add(coin3)
    def draw(self, surface):
        self.power_ups.draw(surface)
        self.coins.draw(surface)
        self.spikes.draw(surface)
        self.ground.draw(surface)
    def update(self):
        # counter advances by the scroll speed (5 px per frame).
        self.counter += 5
        self.coins.update()
        self.spikes.update()
        self.ground.update()
        self.power_ups.update()
        # Once the current layout has fully scrolled past, queue the next one.
        if self.counter == self.length:
            self.counter = 0
            self.choose_formation()
        # Rare random power-up spawn (5 in 5001 chance per frame).
        a = random.randint(0, 5000)
        if a >= 4996:
            power_up = PowerUp(self.screen, random.randint(1, 2))
            self.power_ups.add(power_up)
        # During "coin heaven", rain extra coins every 25 counter ticks.
        if self.screen.runner.power_up == "coin heaven":
            if self.counter % 25 == 0:
                coin = Coin(self.screen, width, random.randint(0, height-100))
                self.coins.add(coin)
class StartScreen():
    """Title screen: shows "RUN" / "GO!" labels; clicking the GO! hotspot
    switches the manager to a fresh GameScreen."""
    def __init__(self, manager):
        self.manager = manager
        self.surface = manager.surface
        self.title = eztext.Input(maxlength=45, color=white, prompt="RUN")
        self.title.set_pos(width/2, 0)
        self.run = eztext.Input(maxlength=45, color=white, prompt="GO!")
        self.run.set_pos(width/2, 300)
    def handle_events(self, event):
        if event.type != pygame.MOUSEBUTTONDOWN:
            return
        x, y = pygame.mouse.get_pos()
        # Hard-coded hit box around the "GO!" label.
        if 400 <= x <= 440 and 300 <= y <= 320:
            self.manager.screen = GameScreen(self.manager)
    def draw(self):
        self.title.draw(self.surface)
        self.run.draw(self.surface)
    def update(self):
        pass
class GameScreen():
    """Main gameplay screen: owns the runner, the obstacle formation, the
    scrolling background and the HUD, and resolves all collisions."""
    def __init__(self, manager):
        self.manager = manager
        self.speed = -5                 # horizontal scroll speed (px/frame, leftwards)
        self.surface = self.manager.surface
        self.runner = Runner(self)
        self.formation = Formation(self)
        self.background = pygame.sprite.Group()
        self.start_ground()
        self.score = 0
        # NOTE(review): prompt here is 'distance:' but update() re-creates it
        # as 'distance: ' (with a space) -- cosmetic inconsistency.
        self.scoreboard = eztext.Input(maxlength=45, color=white, prompt='distance:'+str(self.score))
        self.power_up_type = eztext.Input(maxlength=45, color=white, prompt='power up: '+self.runner.power_up)
        self.power_up_type.set_pos(200, 0)
    def start_ground(self):
        # Lay an initial full-width floor and two backdrop tiles.
        for i in range(0, width+35, 35):
            block = Ground(self, i, height - 35)
            self.formation.ground.add(block)
        wall = Background(self, 0)
        wall2 = Background(self, width)
        self.background.add(wall)
        self.background.add(wall2)
    def handle_events(self, event):
        self.runner.key_event(event)
    def draw(self):
        self.background.draw(self.surface)
        self.runner.draw()
        self.formation.draw(self.surface)
        self.scoreboard.draw(self.surface)
        self.power_up_type.draw(self.surface)
    def check_colliding(self):
        # Coins: +10 score each.
        for coin in self.formation.coins:
            if pygame.sprite.collide_rect(coin, self.runner):
                self.score += 10
                coin.kill()
        # Spikes: fatal unless the 'shield' power-up is active, which instead
        # bounces the runner upward.  NOTE(review): PowerUp type 1 loads
        # "invincibility" art but grants 'shield' -- confirm naming intended.
        for spike in self.formation.spikes:
            if pygame.sprite.collide_rect(spike, self.runner):
                if self.runner.power_up == 'shield':
                    self.runner.velocity = self.runner.jump_height
                    self.runner.state = 'jumping'
                else:
                    print self.score
                    sys.exit()
        # Ground: landing snaps the runner onto the tile's top edge.
        for block in self.formation.ground:
            if pygame.sprite.collide_rect(block, self.runner):
                if self.runner.state == 'falling' or self.runner.state == 'standing':
                    if self.runner.rect.top <= block.rect.top:
                        self.runner.state = 'standing'
                        self.runner.rect.bottom = block.rect.top
        # Power-ups: type 1 -> shield, type 2 -> coin heaven, 500 frames each.
        for power_up in self.formation.power_ups:
            if pygame.sprite.collide_rect(power_up, self.runner):
                if power_up.type == 1:
                    self.runner.power_up = 'shield'
                    self.runner.power_up_timer = 500
                    power_up.kill()
                if power_up.type == 2:
                    self.runner.power_up = 'coin heaven'
                    self.runner.power_up_timer = 500
                    power_up.kill()
    def update(self):
        self.runner.update()
        self.formation.update()
        self.background.update()
        self.check_colliding()
        self.score += 1
        # HUD widgets are rebuilt every frame to reflect score/power-up text.
        self.power_up_type = eztext.Input(maxlength=45, color=white, prompt='power up: '+self.runner.power_up)
        self.scoreboard = eztext.Input(maxlength=45, color=white, prompt='distance: '+str(self.score))
        self.power_up_type.set_pos(200, 0)
class Game():
    """Top-level state holder that delegates everything to the active screen
    (StartScreen initially, swapped to GameScreen on start)."""
    def __init__(self, surface):
        self.surface = surface
        self.screen = StartScreen(self)
    def handle_events(self, event):
        self.screen.handle_events(event)
    def draw(self):
        self.screen.draw()
    def update(self):
        self.screen.update()
"repo_name": "MasterOfDisguise/runner",
"path": "classes.py",
"copies": "1",
"size": "14312",
"license": "mit",
"hash": 1164513585669559600,
"line_mean": 33.3237410072,
"line_max": 136,
"alpha_frac": 0.5371017328,
"autogenerated": false,
"ratio": 3.6772867420349433,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4714388474834943,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from django import template
from django.template.loader import get_template
from Utils import parsers
class ButtonNode(template.Node):
    """Template node that renders button_widget.html with an action and a name.

    NOTE(review): the token values are stored as template.Variable objects but
    passed to the sub-template unresolved (render() ignores `context`) --
    confirm this is intended.
    """
    def __init__(self, vals):
        # vals = token.split_contents(): [tag_name, action, name]
        self.action = template.Variable(vals[1])
        self.name = template.Variable(vals[2])
    def render(self, context):
        return get_template('button_widget.html').render(template.Context({'action':self.action,'name':self.name}))
class PkNode(template.Node):
    """Template node that resolves a form variable from the context and
    returns the form's initial 'code' value (used as the row key)."""
    def __init__(self, vals):
        # vals = token.split_contents(): [tag_name, form_variable]
        self.pk = template.Variable(vals[1])
    def render(self, context):
        context_pk = self.pk.resolve(context)
        actual_key = context_pk.form.initial['code']
        return actual_key
register = template.Library()
# Context-less inclusion-tag stubs: each hands an empty context to its
# template when registered below.
def checkboxFunc():
    return {}
def checkbox_utils():
    return {}
def editButtonFunc():
    return {}
def hrefButtonFunc():
    return {}
def tableButtonFunc():
    return {}
# Compilation function for the {% generic_buttonFunc %} tag.
# NOTE(review): the first parameter is Django's template parser, misleadingly
# named `self`.
def generic_buttonFunc(self,token):
    vals = token.split_contents()
    # Strip stray double-quote characters from each token piece.
    for i in range(0,len(vals)):
        vals[i] = parsers.clean_alphabet_string_from_garbage(['\"'],vals[i])
    return ButtonNode(vals)
# Compilation function for the {% get_pk_from_form %} tag.
def get_pk_from_form(self,token):
    vals = token.split_contents()
    return PkNode(vals)
def checkbox_button():
    return {}
def new_project_button():
    return {}
def progress_bar():
    return {}
def run_step():
    return {}
register.inclusion_tag('checkbox_utils.html')(checkbox_utils)
register.inclusion_tag('checkbox_button.html')(checkbox_button)
register.inclusion_tag('row_button.html')(tableButtonFunc)
register.inclusion_tag('new_project_button.html')(new_project_button)
register.inclusion_tag('progressBar.html')(progress_bar)
register.inclusion_tag('runStepUtils.html')(run_step)
register.tag('generic_buttonFunc')(generic_buttonFunc)
register.tag('get_pk_from_form', get_pk_from_form)
| {
"repo_name": "ehudmagal/robotqcapp",
"path": "kioskApp/templatetags/kioskApp_tags.py",
"copies": "2",
"size": "1824",
"license": "bsd-3-clause",
"hash": 5425122783379173000,
"line_mean": 34.0769230769,
"line_max": 115,
"alpha_frac": 0.6957236842,
"autogenerated": false,
"ratio": 3.583497053045187,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5279220737245187,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from py4j.java_gateway import JavaGateway, GatewayParameters
class HWSRestMethod(object):
    """Static facade over a Java-side REST client reached through py4j.

    Every method forwards to the JVM entry point; `ak`/`sk` look like
    access/secret keys used for request signing on the Java side -- TODO
    confirm.  The gateway connects at class-definition time.
    """
    # Gateway to the JVM process listening on port 25535.
    gateway = JavaGateway(gateway_parameters=GatewayParameters(port=25535))
    rest_method_java = gateway.entry_point
    @staticmethod
    def get(ak, sk, request_url, service_name, region):
        return HWSRestMethod.rest_method_java.get(ak, sk, request_url, service_name, region)
    @staticmethod
    def put(ak, sk, request_url, body, service_name, region):
        return HWSRestMethod.rest_method_java.put(ak, sk, request_url, body, service_name, region)
    @staticmethod
    def post(ak, sk, request_url, body, service_name, region):
        return HWSRestMethod.rest_method_java.post(ak, sk, request_url, body, service_name, region)
    @staticmethod
    def patch(ak, sk, request_url, body, service_name, region):
        return HWSRestMethod.rest_method_java.patch(ak, sk, request_url, body, service_name, region)
    @staticmethod
    # NOTE(review): parameter named `requestUrl` here vs `request_url`
    # elsewhere -- inconsistent naming, kept for keyword-caller compatibility.
    def delete(ak, sk, requestUrl, service_name, region):
        return HWSRestMethod.rest_method_java.delete(ak, sk, requestUrl, service_name, region)
| {
"repo_name": "Hybrid-Cloud/orchard",
"path": "code/hwcloud/java_gateway.py",
"copies": "4",
"size": "1128",
"license": "apache-2.0",
"hash": 3394146192442193400,
"line_mean": 37.8965517241,
"line_max": 100,
"alpha_frac": 0.7127659574,
"autogenerated": false,
"ratio": 3.4922600619195046,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6205026019319505,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from PyQt5.QtCore import (QThread,pyqtSignal,QSemaphore,QUrl)
from PyQt5.QtNetwork import (QNetworkAccessManager,QNetworkRequest)
import requests
import xxtea
import time
import json
import random
# Global semaphore shared by all SearchThread instances: at most 20
# concurrent queries (acquired/released around each search in run()).
mutex=QSemaphore(20)
class SearchThread(QThread):
    """Polls 12306's left-ticket query endpoint in a loop and emits the
    raw result list through ``searchThreadCallback``.

    Each thread staggers its start by ``threadId`` seconds and throttles
    itself with the module-level ``mutex`` semaphore plus ``interval``
    seconds of sleep between queries. ``load_station_code`` must be
    called before the thread is started.
    """
    domain = 'kyfw.12306.cn' # request domain (the address actually connected to)
    host='kyfw.12306.cn' # domain sent as the Host header
    http = requests.session()  # shared session holding the 12306 cookies set up in run()
    stopSignal=False  # set by stop(); checked by the polling loop in run()
    threadId=1
    leftTicketUrl="leftTicket/query"
    requests.packages.urllib3.disable_warnings()  # queries below use verify=False
    searchThreadCallback= pyqtSignal(list)  # emits ticketInfo['data'] on a successful query
    def __init__(self,from_station,to_station,train_date,threadId,leftTicketUrl,interval=2,domain=''):
        super(SearchThread,self).__init__()
        # An explicit domain overrides the class-level default.
        if domain!='':
            self.domain=domain
        self.threadId=threadId
        self.from_station=from_station
        self.to_station=to_station
        self.train_date=train_date
        self.interval=interval
        self.leftTicketUrl=leftTicketUrl
    def run(self):
        # Stagger start-up so the threads don't all hit the server at once.
        time.sleep(self.threadId)
        userAgent="Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36"
        headers={'Referer':'https://kyfw.12306.cn/otn/leftTicket/init',"host":self.host\
            ,'Cache-Control':'no-cache','Pragma':"no-cache","User-Agent":userAgent,"X-Requested-With":"XMLHttpRequest"}
        t=str(random.random())  # NOTE(review): computed but never used
        dataUrl='?leftTicketDTO.train_date='+self.train_date\
                +"&leftTicketDTO.from_station="+self.stationCode[self.from_station]+"&leftTicketDTO.to_station="+\
                self.stationCode[self.to_station]+"&purpose_codes=ADULT"
        logUrl='https://' + self.domain + '/otn/leftTicket/log'+dataUrl
        url='https://' + self.domain + '/otn/'+self.leftTicketUrl+dataUrl
        self.http.get(logUrl,verify=False,headers=headers)
        # The _jc_save_* cookies mirror what the 12306 web page stores
        # client-side (xxtea.unicodeStr reproduces the JS %uXXXX escaping).
        jc_fromStation=xxtea.unicodeStr(self.from_station+","+self.stationCode[self.from_station])
        jc_toStation=xxtea.unicodeStr(self.to_station+","+self.stationCode[self.to_station])
        self.http.cookies.set("_jc_save_fromStation",jc_fromStation)
        self.http.cookies.set("_jc_save_toStation",jc_toStation)
        self.http.cookies.set('_jc_save_fromDate',self.train_date)
        self.http.cookies.set('_jc_save_toDate',"2014-01-01")
        self.http.cookies.set('_jc_save_wfdc_flag','dc')
        ret=self.http.get(url,verify=False,headers=headers)
        ticketInfo=ret.json()
        if ticketInfo['status']!=True :
            print(ticketInfo)
        # Replay the session cookies through a QNetworkRequest that the
        # polling loop below reuses for every query.
        cookies=self.http.cookies.get_dict()
        cookieStr=";".join('%s=%s' % (key, value) for (key, value) in cookies.items())
        self.http.get(logUrl,verify=False,headers=headers)
        self.req=QNetworkRequest()
        self.req.setUrl(QUrl(url))
        self.req.setRawHeader("Referer","https://kyfw.12306.cn/otn/leftTicket/init")
        self.req.setRawHeader("host",self.host)
        self.req.setRawHeader("Cache-Control","no-cache")
        self.req.setRawHeader("Pragma","no-cache")
        self.req.setRawHeader("User-Agent",userAgent)
        self.req.setRawHeader("Cookie",cookieStr)
        # Poll until stop() flips stopSignal; mutex caps global concurrency.
        while not self.stopSignal:
            mutex.acquire(1)
            self.search_ticket(self.from_station,self.to_station,self.train_date)
            mutex.release(1)
            time.sleep(self.interval)
    def search_ticket(self, fromStation, toStation, date):
        # NOTE(review): fromStation/toStation/date are unused here -- the
        # query is already baked into self.req by run().
        try:
            self.netWorkManager=QNetworkAccessManager()
            self.reply=self.netWorkManager.get(self.req)
            self.reply.ignoreSslErrors()
            self.reply.finished.connect(self.search_finished)
            # Spin this thread's event loop until search_finished() calls exit().
            self.exec()
        except Exception as e:
            print("ip:"+self.domain+"查询发生错误:"+e.__str__())
            return False
    def search_finished(self):
        # Slot for QNetworkReply.finished: parse the JSON body and emit
        # the ticket rows; any failure path just logs and returns.
        try:
            ret=self.reply.readAll()
            ret=str(ret,'utf8')
            ticketInfo=json.loads(ret)
            self.reply=None
            self.netWorkManager=None
            # Unblock the exec() loop inside search_ticket().
            self.exit()
            if ticketInfo['status']!=True or ticketInfo['messages']!=[] :
                print(self.domain)
                print(ticketInfo)
                return False
            if len(ticketInfo['data'])<=0:
                return False
            data=ticketInfo['data']
            ret=None
            ticketInfo=None
            self.searchThreadCallback.emit(data)
        except Exception as e:
            print(e.__str__())
    def load_station_code(self,stationCode):
        # stationCode: dict mapping station display name -> telecode;
        # must be loaded before the thread is started.
        self.stationCode = stationCode
        return True
def stop(self):
self.stopSignal=True | {
"repo_name": "qhgongzi/xilinTicketV2",
"path": "searchthread.py",
"copies": "1",
"size": "4668",
"license": "bsd-3-clause",
"hash": -8067277004323722000,
"line_mean": 34.2442748092,
"line_max": 129,
"alpha_frac": 0.6288994801,
"autogenerated": false,
"ratio": 3.4785229841748304,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9521399054892759,
"avg_score": 0.01720468187641446,
"num_lines": 131
} |
__author__ = 'Administrator'
import json
import time
import random
from cinder.volume import driver
from hwcloud.database_manager import DatabaseManager
from hwcloud.hws_service.client import HWSClient
from cinder.openstack.common import log as logging
from cinder.volume.drivers.hws import sshutils as sshclient
from oslo.config import cfg
from keystoneclient.v2_0 import client as kc
from cinder.openstack.common import fileutils
from cinder.openstack.common import excutils
from cinder.image import image_utils
import traceback
import string
import os
# oslo.config options for the [hws] group: credentials and default resource
# ids used to reach the Huawei HWS cloud. gong_yao / si_yao are presumably
# the access/secret key pair fed to HWSClient -- TODO confirm.
hws_opts = [cfg.StrOpt('project_id', help='project_id'),
            cfg.StrOpt('flavor_id', help='flavor id'),
            cfg.StrOpt('vpc_id', help='vpc_id'),
            cfg.StrOpt('subnet_id', help='subnet_id'),
            cfg.StrOpt('image_id', help='image_id'),
            cfg.StrOpt('gong_yao', help='gong yao'),
            cfg.StrOpt('si_yao', help='si yao'),
            cfg.StrOpt('service_region', help='region where resource to create in'),
            cfg.StrOpt('resource_region', help='region where resource to create in'),
            cfg.StrOpt('service_protocol', help='protocol', default='https'),
            cfg.StrOpt('service_port', help='port', default='443'),
            cfg.StrOpt('volume_type', help='default volume_typ', default='SATA')]
CONF = cfg.CONF
hws_group = 'hws'
CONF.register_opts(hws_opts, hws_group)
# Keystone admin credentials, used to locate the remote v2v gateway endpoint
# (see HWSDriver._copy_file_to_remote_vgw).
remote_vgw_keystone_opts = [
    cfg.StrOpt('tenant_name',
               default='admin',
               help='tenant name for connecting to keystone in admin context'),
    cfg.StrOpt('user_name',
               default='cloud_admin',
               help='username for connecting to cinder in admin context'),
    cfg.StrOpt('keystone_auth_url',
               default='https://identity.cascading.hybrid.huawei.com:443/identity-admin/v2.0',
               help='value of keystone url'),
]
remote_vgw_keystone_group = 'keystone_authtoken'
CONF.register_opts(remote_vgw_keystone_opts, remote_vgw_keystone_group)
# SSH access to the local-AZ v2v gateway VM that volumes get attached to for
# image <-> volume copies. NOTE(review): a real-looking root password ships
# as the default here -- credentials should come from deployment config,
# not source control.
hws_vgw_opts = [
    cfg.StrOpt('user_name',
               default='root',
               help='user name for local az hws v2v gateway host'),
    cfg.StrOpt('password',
               default='Huawei@CLOUD8!',
               help='password for local az hws v2v gateway host'),
    cfg.StrOpt('host_ip',
               default='172.21.0.23',
               help='ip for local az hws v2v gateway host'),
    cfg.StrOpt('ssh_retry_times',
               default='3',
               help='ssh retry times'),
    cfg.StrOpt('hws_instance_id',
               # default='72dca101-e822-4923-a3a1-ffac838ff5d5',
               default='a83325ee-4917-4896-9eac-227f5934115a',
               help='hws vgw instance id'),
    cfg.StrOpt('hws_vgw_ip',
               # default='117.78.35.163',
               default='117.78.36.181',
               help='hws vgw instance id'),
]
hws_vgw_group = 'hws_vgw'
CONF.register_opts(hws_vgw_opts, hws_vgw_group)
LOG = logging.getLogger(__name__)
# Volume types accepted by HWS EVS.
SATA = 'SATA'
SSD = 'SSD'
SAS = 'SAS'
SUPPORT_VOLUME_TYPE = [SATA, SSD, SAS]
HWS_SERVER_STATUS = {
    'active': 'ACTIVE',
    'shutoff': 'SHUTOFF'
}
# Maps the device name the ECS API reports (/dev/sdX) to the device node
# actually visible inside the vgw guest (/dev/xvdX -- presumably Xen
# naming; confirm on the gateway image).
HWS_REAL_DEVNAME = {
    '/dev/sda': '/dev/xvda',
    '/dev/sdb': '/dev/xvde',
    '/dev/sdc': '/dev/xvdf',
    '/dev/sdd': '/dev/xvdg',
    '/dev/sde': '/dev/xvdh',
    '/dev/sdf': '/dev/xvdi',
    '/dev/sdg': '/dev/xvdj',
    '/dev/sdh': '/dev/xvdk',
    '/dev/sdi': '/dev/xvdl',
    '/dev/sdj': '/dev/xvdm',
    '/dev/sdk': '/dev/xvdn'
}
class HWSDriver(driver.VolumeDriver):
    """Cinder volume driver backed by the Huawei HWS public cloud.

    Volume CRUD is delegated to the HWS REST client (``HWSClient``); local
    cinder volume ids are mapped to HWS volume ids via ``DatabaseManager``.
    Image <-> volume copies work by attaching the HWS volume to a dedicated
    "v2v gateway" VM and moving data over SSH with qemu-img.
    """
    VERSION = "1.0"
    def __init__(self, *args, **kwargs):
        super(HWSDriver, self).__init__( *args, **kwargs)
        gong_yao = CONF.hws.gong_yao
        si_yao = CONF.hws.si_yao
        region = CONF.hws.service_region
        protocol = CONF.hws.service_protocol
        port = CONF.hws.service_port
        self.hws_client = HWSClient(gong_yao, si_yao, region, protocol, port)
        self.db_manager = DatabaseManager()
        self.project_id = CONF.hws.project_id
        self.availability_zone = CONF.hws.resource_region
        self.volume_type_default = CONF.hws.volume_type
        self.hws_vgw_user = CONF.hws_vgw.user_name
        self.hws_vgw_password = CONF.hws_vgw.password
        # NOTE(review): attribute name looks like a typo ("wgw" vs "vgw");
        # it is never read within this file.
        self.hws_wgw_ip = CONF.hws_vgw.host_ip
        self.hws_vgw_ip = CONF.hws_vgw.hws_vgw_ip
    def create_volume(self, volume):
        """Create a volume.
        """
        LOG.info('VOLUME: %s' % dir(volume))
        LOG.info('IMAGE ID: %s' % volume.get('image_id'))
        if not volume.get('image_id'):
            volume_name = self._get_display_name(volume)
            project_id = self.project_id
            size = volume.size
            volume_type = self.volume_type_default
            job_info = self.hws_client.evs.create_volume(project_id, self.availability_zone,
                                                         size, volume_type, name=volume_name)
            self._deal_with_job(job_info, project_id, self._add_volume_mapping_to_db, None, volume)
        else:
            # Image-backed volumes are created lazily in copy_image_to_volume.
            return {'provider_location': 'HWS CLOUD'}
    def _get_display_name(self, volume):
        """Return the cinder display name, or a random 8-char name when the
        original exceeds HWS's apparent 20-char limit."""
        original_display_name = volume.display_name
        if len(original_display_name) < 20:
            display_name = original_display_name
        else:
            display_name = self._get_random_name(8)
        return display_name
    def _get_random_name(self, length):
        """Return *length* distinct random alphanumeric characters."""
        return ''.join(random.sample(string.ascii_letters + string.digits, length))
    def _get_instance_volume_list(self, instance_id):
        """
        :param instance_id: string, hws server id
        :return volume_list_rsp: raw response dict; raises on non-200 status
        """
        volume_list_rsp = self.hws_client.ecs.get_volume_list(self.project_id, instance_id)
        if volume_list_rsp['status'] != 200:
            error_info = 'hws_v2v: get hws v2v gateway host volume list error, Exception: %s' \
                         % json.dumps(volume_list_rsp)
            LOG.error(error_info)
            raise Exception(error_info)
        return volume_list_rsp
    def _get_volume_detail(self, volume_id):
        """
        :param volume_id: string, hws volume id
        :return volume_detail_rsp: raw response dict; raises on non-200 status
        """
        volume_detail_rsp = self.hws_client.evs.get_volume_detail(self.project_id, volume_id)
        if volume_detail_rsp['status'] != 200:
            error_info = 'hws_v2v: get hws volume detail error, Exception: %s' \
                         % json.dumps(volume_detail_rsp)
            LOG.error(error_info)
            raise Exception(error_info)
        return volume_detail_rsp
    def _attach_volume(self, instance_id, volume_id, device_name):
        """Attach an HWS volume to an HWS server and wait for the job.

        :param instance_id: string, hws server id
        :param volume_id: string, hws volume id
        :param device_name: device name, e.g. '/dev/sdb'
        :return:
        """
        job_attach_volume = self.hws_client.ecs.attach_volume(self.project_id,
                                                              instance_id,
                                                              volume_id,
                                                              device_name)
        self._deal_with_job(job_attach_volume, self.project_id)
    def _deal_java_error(self, java_response):
        """
        {
            'status': 'error',
            'body': {
                'message': '<MESSAGE>',
                'exception': '<EXCEPTION>'
            }
        }
        :param java_response: dict
        :return:
        """
        if 'error' == java_response['status']:
            error_message = java_response['body']['message']
            exception = java_response['body']['exception']
            LOG.error('Java error message: %s, exception: %s' % (error_message, exception))
            # NOTE(review): `exception` here is the local string just
            # extracted, and no `exception` module is imported in this
            # file -- this line would itself fail (AttributeError) if
            # reached. Probably meant a cinder exception class; confirm.
            raise exception.NovaException(exception)
        if 200 == java_response['status']:
            return
        elif 202 == java_response['status']:
            return
        else:
            error_info = json.dumps(java_response)
            LOG.error(error_info)
            raise Exception(error_info)
    def _power_on(self, instance_id):
        """Start the HWS server and surface any java-side error."""
        start_result = self.hws_client.ecs.start_server(self.project_id, instance_id)
        self._deal_java_error(start_result)
    def _power_off(self, instance_id):
        """Stop the HWS server and surface any java-side error."""
        stop_result = self.hws_client.ecs.stop_server(self.project_id, instance_id)
        self._deal_java_error(stop_result)
    def _get_server_status(self, instance_id):
        """Return the HWS server status string (e.g. ACTIVE/SHUTOFF)."""
        try:
            server = self.hws_client.ecs.get_detail(self.project_id, instance_id)
            if server and server['status'] == 200:
                status = server['body']['server']['status']
        except Exception:
            msg = traceback.format_exc()
            raise Exception(msg)
        # NOTE(review): if the response is missing or non-200, `status`
        # is never bound and this return raises NameError.
        return status
    def _stop_server(self, instance_id):
        """Power the server off (if ACTIVE) and poll until SHUTOFF."""
        status = self._get_server_status(instance_id)
        if HWS_SERVER_STATUS['active'] == status:
            self._power_off(instance_id)
            time.sleep(20)
            retry_times = 10
            # query server status until server status is SHUTOFF
            while retry_times > 0:
                time.sleep(5)
                status = self._get_server_status(instance_id)
                LOG.error('status: %s' % status)
                if HWS_SERVER_STATUS['shutoff'] == status:
                    break
                retry_times -= 1
        if HWS_SERVER_STATUS['shutoff'] != status:
            msg = "hws_v2v: stop server failed, hws_instance_id: %s, status: %s " %\
                  (instance_id, status)
            raise Exception(msg)
    def _detach_volume(self, instance_id, volume_id):
        """
        Detach the disk attached to the instance.
        :param connection_info:
        {
            u'driver_volume_type': u'vcloud_volume',
            u'serial': u'824d397e-4138-48e4-b00b-064cf9ef4ed8',
            u'data': {
                u'backend': u'vcloud',
                u'qos_specs': None,
                u'access_mode': u'rw',
                u'display_name': u'volume_02',
                u'volume_id': u'824d397e-4138-48e4-b00b-064cf9ef4ed8'
            }
        }
        :param instance:
        :param mountpoint: string, e.g. '/dev/sdb'
        :param encryption:
        :return:
        """
        job_detach_volume = self.hws_client.ecs.detach_volume(self.project_id,
                                                              instance_id,
                                                              volume_id)
        self._deal_with_job(job_detach_volume, self.project_id)
    def _get_instance_next_devname(self, instance_id):
        """Pick the first unused /dev/sdX letter on the given instance."""
        volume_list_rsp = self._get_instance_volume_list(instance_id)
        volume_list = volume_list_rsp['body']['volumeAttachments']
        used_device_letter = set()
        all_letters = set(string.ascii_lowercase)
        for volume in volume_list:
            used_device_letter.add(volume.get('device')[-1])
        unused_device_letter = list(all_letters - used_device_letter)
        LOG.error(used_device_letter)
        LOG.error(all_letters)
        # NOTE(review): reuses the last loop variable `volume` to get the
        # device-name prefix -- fails (NameError) if the list is empty,
        # and the chosen unused letter is effectively random (set order).
        next_dev_name = volume.get('device')[:-1] + unused_device_letter[0]
        return next_dev_name
    def _get_management_url(self, kc, image_name, **kwargs):
        """Look up the public endpoint whose region matches the suffix of
        *image_name* (convention: name ends with '_<region>')."""
        endpoint_info = kc.service_catalog.get_endpoints(**kwargs)
        endpoint_list = endpoint_info.get(kwargs.get('service_type'), None)
        region_name = image_name.split('_')[-1]
        if endpoint_list:
            for endpoint in endpoint_list:
                if region_name == endpoint.get('region'):
                    return endpoint.get('publicURL')
    def _copy_volume_to_file(self, image_meta, dev_name):
        """qemu-img convert the attached device to /tmp/<image_id> (qcow2)
        on the vgw host, over SSH."""
        image_id = image_meta.get('id')
        dest_file_path = os.path.join('/tmp', image_id)
        real_devname = HWS_REAL_DEVNAME[dev_name]
        # NOTE(review): if sshclient.SSH(...) itself raises, `ssh_client`
        # is unbound and the finally block raises NameError.
        try:
            ssh_client = sshclient.SSH(user=self.hws_vgw_user,
                                       host=self.hws_vgw_ip,
                                       password=self.hws_vgw_password)
            # convert volume to image
            cmd = 'qemu-img convert -c -O qcow2 %s %s' % \
                  (real_devname, dest_file_path)
            LOG.error('begin time of %s is %s' %
                      (cmd, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()
                                          )))
            ssh_client.run(cmd)
            LOG.debug("Finished running cmd : %s" % cmd)
            LOG.error('end time of %s is %s' %
                      (cmd, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
        except Exception as e:
            with excutils.save_and_reraise_exception():
                LOG.error('Failed to copy volume to image by vgw.',
                          traceback.format_exc())
        finally:
            if ssh_client:
                # delete the temp file which is used for convert volume to image
                ssh_client.close()
    @sshclient.RetryDecorator(max_retry_count=CONF.hws_vgw.ssh_retry_times,
                              exceptions=(sshclient.SSHError, sshclient.SSHTimeout))
    def _copy_file_to_remote_vgw(self, image_meta):
        """Push /tmp/<image_id> from the local vgw to the remote v2v
        gateway (located via the keystone catalog) with curl, then delete
        the temp file."""
        image_id = image_meta.get('id')
        image_name = image_meta.get('name')
        dest_file_path = os.path.join('/tmp', image_id)
        kwargs = {
            'auth_url': CONF.keystone_authtoken.keystone_auth_url,
            'tenant_name': CONF.keystone_authtoken.tenant_name,
            'user_name': CONF.keystone_authtoken.user_name,
            'password': CONF.keystone_authtoken.password,
            'insecure': True
        }
        keystone_client = kc.Client(**kwargs)
        # get remote v2v gateway
        vgw_url = self._get_management_url(keystone_client, image_name, service_type='v2v')
        # NOTE(review): same unbound-`ssh_client` risk in finally as
        # _copy_volume_to_file.
        try:
            ssh_client = sshclient.SSH(user=self.hws_vgw_user,
                                       host=self.hws_vgw_ip,
                                       password=self.hws_vgw_password)
            LOG.debug('The remote vgw url is %(vgw_url)s',
                      {'vgw_url': vgw_url})
            # eg: curl -X POST --http1.0 -T
            # /tmp/467bd6e1-5a6e-4daa-b8bc-356b718834f2
            # http://172.27.12.245:8090/467bd6e1-5a6e-4daa-b8bc-356b718834f2
            cmd = 'curl -X POST --http1.0 -T %s ' % dest_file_path
            cmd += vgw_url
            if cmd.endswith('/'):
                cmd += image_id
            else:
                cmd += '/' + image_id
            LOG.error('begin time of %s is %s' %
                      (cmd, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()
                                          )))
            ssh_client.run(cmd)
            LOG.error('end time of %s is %s' %
                      (cmd, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()
                                          )))
            LOG.debug("Finished running cmd : %s" % cmd)
            ssh_client.run('rm -f %s' % dest_file_path)
        except Exception as e:
            with excutils.save_and_reraise_exception():
                LOG.error('Failed to copy volume to image by vgw.',
                          traceback.format_exc())
        finally:
            if ssh_client:
                ssh_client.close()
    def copy_volume_to_image(self, context, volume, image_service, image_meta):
        """Copy a volume to a glance image by attaching the HWS volume to
        the vgw VM, dumping it to a file there, and uploading an empty
        placeholder to glance."""
        container_format = image_meta.get('container_format')
        #if container_format == 'vgw_url':
        if container_format == 'bare':
            try:
                # 1.get the hws volume id
                cascaded_volume_id = volume['id']
                hws_volume_id = self.db_manager.get_cascaded_volume_id(cascaded_volume_id)
                if not hws_volume_id:
                    msg = 'get hws volume id error, cascaded id: %s' % cascaded_volume_id
                    LOG.error(msg)
                    raise Exception('get hws volume id error')
                # 2. get the hws_volume status
                volume_detail_rsp = self._get_volume_detail(hws_volume_id)
                status = volume_detail_rsp['body']['volume']['status']
                # attachments = volume_detail_rsp['body']['volume']['attachments']
                # attach_num = len(attachments)
                # origin_instance_id = None
                # attach_back = False
                # 3. detach volume from origin instance
                # if status == 'in-use':
                #     if attach_num != 1:
                #         msg = 'hws_v2v: get attachments info error, num: %s' % attach_num
                #         LOG.error(msg)
                #         raise Exception(msg)
                #     origin_instance_id = attachments[0]['server_id']
                #     # volume can only be detached when sever stop
                #     self._stop_server(origin_instance_id)
                #     self._detach_volume(origin_instance_id, hws_volume_id)
                #     attach_back = True
                #     volume_detail_rsp = self._get_volume_detail(hws_volume_id)
                #     status = volume_detail_rsp['body']['status']
                # 4. attach volume to hws v2v gateway host
                if status != 'available':
                    msg = 'attach volume to local v2v gateway host error, status : %s, cascaded_volume_id: %s, ' \
                          'hws_volume_id %s' % (status, cascaded_volume_id, hws_volume_id)
                    LOG.error(msg)
                    raise Exception('attach volume to local v2v gateway failed')
                hws_vgw_instance_id = CONF.hws_vgw.hws_instance_id
                # if not hws_vgw_instance_id:
                #     LOG.error(
                #         'hws_v2v: get cascaded v2v gateway instance id error: %s' % CONF.hws_vgw.cascaded_instance_id)
                #     raise Exception('hws_v2v: get cascaded v2v gateway instance error.')
                dev_name = self._get_instance_next_devname(hws_vgw_instance_id)
                self._attach_volume(hws_vgw_instance_id, hws_volume_id, dev_name)
                # 5. copy volume to file
                self._copy_volume_to_file(image_meta, dev_name)
                # 6. copy file to remote v2v gateway
                # self._copy_file_to_remote_vgw(image_meta)
                # 7. create a empty file to glance
                with image_utils.temporary_file() as tmp:
                    image_utils.upload_volume(context,
                                              image_service,
                                              image_meta,
                                              tmp)
                fileutils.delete_if_exists(tmp)
                # 8. detach volume from hws v2v gateway
                self._stop_server(hws_vgw_instance_id)
                self._detach_volume(hws_vgw_instance_id, hws_volume_id)
                self._power_on(hws_vgw_instance_id)
            finally:
                # NOTE(review): leftover from the commented-out re-attach
                # logic above -- this assignment has no effect.
                attach_back = True
                # if attach_back is True:
                #     origin_dev_name = attachments[0]['device']
                #     self._attach_volume(origin_instance_id, hws_volume_id, origin_dev_name)
                #     self._power_on(origin_instance_id)
    @sshclient.RetryDecorator(max_retry_count=CONF.hws_vgw.ssh_retry_times,
                              exceptions=(sshclient.SSHError, sshclient.SSHTimeout))
    def _copy_file_to_volume(self, image_id, dev_name):
        """qemu-img convert /tmp/<image_id> onto the attached device on
        the vgw host, over SSH."""
        # NOTE(review): if sshclient.SSH(...) itself raises, `ssh_client`
        # is unbound and the finally block raises NameError.
        try:
            real_devname = HWS_REAL_DEVNAME[dev_name]
            dest_file_path = os.path.join('/tmp', image_id)
            ssh_client = sshclient.SSH(user=self.hws_vgw_user,
                                       host=self.hws_vgw_ip,
                                       password=self.hws_vgw_password)
            # copy data to volume
            cmd = 'qemu-img convert %s %s' % \
                  (dest_file_path, real_devname)
            ssh_client.run(cmd)
            LOG.debug("Finished running cmd : %s" % cmd)
            # cmd = 'rm -rf %s' % dest_file_path
            # ssh_client.run(cmd)
        except Exception as e:
            LOG.error('Failed to copy data to volume from vgw. '
                      'traceback: %s', traceback.format_exc())
            raise e
        finally:
            if ssh_client:
                ssh_client.close()
    def copy_image_to_volume(self, context, volume, image_service, image_id):
        """Populate a volume from a glance image via the vgw VM."""
        image_meta = image_service.show(context, image_id)
        container_format = image_meta.get('container_format')
        # if container_format == 'vgw_url':
        if container_format == 'bare':
            # 1.get the hws_volume_id
            cascaded_volume_id = volume['id']
            self.create_volume(volume)
            hws_volume_id = self.db_manager.get_cascaded_volume_id(cascaded_volume_id)
            if not cascaded_volume_id:
                LOG.error('get cascaded volume id error: %s' % cascaded_volume_id)
                raise Exception('get cascaded volume id error.')
            # 2. get the hws_volume status
            time.sleep(30)
            retry_times = 10
            while retry_times > 0:
                volume_detail_rsp = self._get_volume_detail(hws_volume_id)
                status = volume_detail_rsp['body']['volume']['status']
                if status == 'available':
                    break
                else:
                    time.sleep(5)
                retry_times -= 1
            if status != 'available':
                LOG.error('create hws volume failed, status: %s, cascaded_volume_id: %s, hws_volume_id: %s'
                          % (status, cascaded_volume_id, hws_volume_id))
                raise Exception('create hws volume failed.')
            # 2. attach volume to hws v2v gateway host
            hws_vgw_instance_id = CONF.hws_vgw.hws_instance_id
            # if not hws_vgw_instance_id:
            #     LOG.error('hws_v2v: get cascaded v2v gateway instance id error.' % CONF.hws_vgw.cascaded_instance_id)
            #     raise Exception('get cascaded v2v gateway instance id error.')
            dev_name = self._get_instance_next_devname(hws_vgw_instance_id)
            self._attach_volume(hws_vgw_instance_id, hws_volume_id, dev_name)
            # 3. copy image's file to volume
            self._copy_file_to_volume(image_id, dev_name)
            # 4. detach volume from hws v2v gateway
            self._stop_server(hws_vgw_instance_id)
            self._detach_volume(hws_vgw_instance_id, hws_volume_id)
            self._power_on(hws_vgw_instance_id)
        # Not to create volume when call cinder create volume API
        # Only when attache or detach, or create server by volume, then create volume.
        # NOTE(review): this branch only runs when image_id is falsy, yet it
        # passes image_id to _get_cascaded_image_id -- looks dead/broken;
        # confirm intent (possibly meant `elif image_id:`).
        elif not image_id:
            volume_name = self._get_display_name(volume)
            project_id = self.project_id
            size = volume.size
            volume_type = self.volume_type_default
            image_hws_id = self._get_cascaded_image_id(image_id)
            job_info = self.hws_client.evs.create_volume(project_id, self.availability_zone,
                                                         size, volume_type, name=volume_name, imageRef=image_hws_id)
            self._deal_with_job(job_info, project_id, self._add_volume_mapping_to_db, None, volume)
    def _get_volume_type(self, volume_type):
        """Return *volume_type* if HWS supports it, else fall back to SATA.
        NOTE(review): not called anywhere in this file."""
        if volume_type not in SUPPORT_VOLUME_TYPE:
            LOG.info('VOLUME TYPE: %s is not support in HWS Clouds, support type is: [%s]. Use SATA as default' %
                     (volume_type, SUPPORT_VOLUME_TYPE))
            volume_type = SATA
        return volume_type
    def _get_cascaded_image_id(self, cascading_image_id):
        """Map a cascading image id to its HWS image id, raising when no
        mapping exists."""
        cascaded_image_id = self.db_manager.get_cascaded_image_id(cascading_image_id)
        if not cascaded_image_id:
            LOG.error('No image mapping in HWS Cloud.')
            raise Exception('No image mapping in HWS Cloud.')
        return cascaded_image_id
    def _add_volume_mapping_to_db(self, job_detail_of_create_volume, volume):
        """
        Success callback for _deal_with_job: record cinder-id -> hws-id.
        :param job_detail_of_create_volume:
        :return:
        """
        hws_volume_id = job_detail_of_create_volume['body']['entities']['volume_id']
        volume_id = volume.id
        self.db_manager.add_volume_mapping(volume_id, hws_volume_id)
        LOG.info('Success to add volume mapping: {%s: %s}' % (volume_id, hws_volume_id))
    def _deal_with_job(self, job_info, project_id,
                       function_deal_with_success=None,
                       function_deal_with_fail=None,
                       object=None):
        """Poll an HWS async job every 5s until SUCCESS (invoke success
        callback) or FAIL (invoke fail callback, then raise). Retries
        indefinitely on transient/error responses."""
        if job_info['status'] == 200:
            job_id = job_info['body']['job_id']
            while True:
                time.sleep(5)
                job_detail_info = self.hws_client.evs.get_job_detail(project_id, job_id)
                if job_detail_info:
                    if job_detail_info['status'] == 200:
                        job_status = job_detail_info['body']['status']
                        if job_status == 'RUNNING':
                            LOG.debug('job<%s> is still RUNNING.' % job_id)
                            continue
                        elif job_status == 'FAIL':
                            if function_deal_with_fail:
                                function_deal_with_fail(job_detail_info, object)
                            error_info = 'job<%s> FAIL, ERROR INFO: %s' % (job_id, json.dumps(job_detail_info))
                            raise Exception(error_info)
                        elif job_status == 'SUCCESS':
                            if function_deal_with_success:
                                function_deal_with_success(job_detail_info, object)
                            success_info = 'job<%s> SUCCESS.' % job_id
                            LOG.info(success_info)
                            break
                    elif job_detail_info['status'] == 'error':
                        error_message = job_detail_info['body']['message']
                        exception = job_detail_info['body']['exception']
                        LOG.error('Java error message: %s, exception: %s' % (error_message, exception))
                        continue
                    else:
                        info = json.dumps(job_detail_info)
                        LOG.info('Job info get has some issue: %s, will retry to get again.' % info )
                        continue
                else:
                    retry_info = 'job detail info is empty, will retry to get. JOB DETAIL: %s' % job_detail_info
                    LOG.info(retry_info)
                    continue
        else:
            error_info = json.dumps(job_info)
            LOG.error('Job init FAIL, error info: %s' % error_info)
            raise Exception(error_info)
    def _deal_with_create_volume_fail(self, job_detail_info, volume):
        """
        deal with create volume fail.
        If hws volume is created, but fail, then save id mapping in db. then raise exception.
        if hws volume id is not created, raise exception directly.
        {
            "body": {
                "status": "FAIL",
                "entities": {
                    "volume_id": "1be7a768-59b6-4ef6-b4c0-a4f8039fa626"
                },
                "job_id": "8aace0c751b0a3bd01523529e4f70d35",
                "job_type": "createVolume",
                "begin_time": "2016-01-12T09:28:04.086Z",
                "end_time": "2016-01-12T09:28:32.252Z",
                "error_code": "EVS.2024",
                "fail_reason": "EbsCreateVolumeTask-fail:volume is error!"
            },
            "status": 200
        }
        :param job_detail_info:
        :param volume:
        :return:
        """
        job_id = job_detail_info.get('body').get('job_id')
        error_info = 'job<%s> FAIL, ERROR INFO: %s' % (job_id, json.dumps(job_detail_info))
        if job_detail_info.get('body').get('entities'):
            hws_volume_id = job_detail_info.get('body').get('entities').get('volume_id')
            if hws_volume_id:
                LOG.info('HWS volume is created, id is: %s' % hws_volume_id)
                volume_id = volume.id
                self.db_manager.add_volume_mapping(volume_id, hws_volume_id)
                LOG.debug('Success to add volume mapping: {%s: %s}' % (volume_id, hws_volume_id))
                raise Exception(error_info)
        raise Exception(error_info)
    def delete_volume(self, volume):
        """Delete the mapped HWS volume; treat a missing mapping or a 404
        from HWS as already-deleted success."""
        cascading_volume_id = volume.id
        project_id = self.project_id
        cascaded_volume_id = self.db_manager.get_cascaded_volume_id(cascading_volume_id)
        LOG.info('VOLUME_ID: %s' % cascaded_volume_id)
        if cascaded_volume_id:
            volume_get = self.hws_client.evs.get_volume_detail(project_id, cascaded_volume_id)
            if volume_get['status'] == 200:
                job_info = self.hws_client.evs.delete_volume(project_id, cascaded_volume_id)
                self._deal_with_job(job_info,project_id, self._delete_volume_mapping, None, volume)
            elif volume_get['status'] == 404 and volume_get.get('body').get('itemNotFound'):
                LOG.info('cascaded volume is not exist, so directly return delete success')
                return
            else:
                error_info = 'Delete volume fail, Exception: %s' % json.dumps(volume_get)
                LOG.error(error_info)
                raise Exception(error_info)
        else:
            LOG.info('cascaded volume is not exist, so directly return delete success')
            return
    def _delete_volume_mapping(self, job_detail_info, volume):
        """Success callback for delete_volume: drop the id mapping."""
        cascading_volume_id = volume.id
        self.db_manager.delete_volume_mapping(cascading_volume_id)
        LOG.info('Delete volume mapping for cascading volume id: %s' % cascading_volume_id)
    def get_volume_stats(self, refresh=False):
        """Get volume stats."""
        # pdb.set_trace()
        # Stats are static/fabricated; computed once and cached.
        if not self._stats:
            backend_name = self.configuration.safe_get('volume_backend_name')
            LOG.debug('*******backend_name is %s' %backend_name)
            if not backend_name:
                backend_name = 'HC_HWS'
            data = {'volume_backend_name': backend_name,
                    'vendor_name': 'Huawei',
                    'driver_version': self.VERSION,
                    'storage_protocol': 'LSI Logic SCSI',
                    'reserved_percentage': 0,
                    'total_capacity_gb': 1000,
                    'free_capacity_gb': 1000}
            self._stats = data
        return self._stats
    def initialize_connection(self, volume, connector):
        """Allow connection to connector and return connection info."""
        LOG.debug('vCloud Driver: initialize_connection')
        driver_volume_type = 'hwclouds_volume'
        data = {}
        data['backend'] = 'hwclouds'
        data['volume_id'] = volume['id']
        data['display_name'] = volume['display_name']
        return {'driver_volume_type': driver_volume_type,
                'data': data}
    def check_for_setup_error(self):
        """Check configuration file."""
        pass
    def create_cloned_volume(self, volume, src_vref):
        """Create a clone of the specified volume."""
        pass
    def create_export(self, context, volume):
        """Export the volume."""
        pass
    def create_snapshot(self, snapshot):
        # Snapshots are not supported by this driver; intentional no-op.
        pass
    def create_volume_from_snapshot(self, volume, snapshot):
        """Create a volume from a snapshot."""
        pass
    def delete_snapshot(self, snapshot):
        """Delete a snapshot."""
        pass
    def do_setup(self, context):
        """Instantiate common class and log in storage system."""
        pass
    def ensure_export(self, context, volume):
        """Synchronously recreate an export for a volume."""
        pass
    def extend_volume(self, volume, new_size):
        """Extend a volume."""
        pass
    def remove_export(self, context, volume):
        """Remove an export for a volume."""
        pass
    def terminate_connection(self, volume, connector, **kwargs):
        """Disallow connection from connector"""
        LOG.debug('vCloud Driver: terminate_connection')
        pass
    def validate_connector(self, connector):
        """Fail if connector doesn't contain all the data needed by driver."""
        LOG.debug('vCloud Driver: validate_connector')
        pass
| {
"repo_name": "nash-x/hws",
"path": "cinder/volume/drivers/hws/__init__.py",
"copies": "1",
"size": "32642",
"license": "apache-2.0",
"hash": 468459309570271170,
"line_mean": 42.1772486772,
"line_max": 120,
"alpha_frac": 0.5405612401,
"autogenerated": false,
"ratio": 3.7911730545876887,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48317342946876884,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import math
import urllib.parse
def stringToLongArray(string, includeLength):
    """Pack *string* into 32-bit little-endian words (by code point).

    Characters are taken four at a time, each contributing one byte-slot
    (low byte first); a short final group simply leaves the high slots
    zero. When *includeLength* is true the character count is appended
    as a trailing word (XXTEA convention).
    """
    total = len(string)
    words = []
    for start in range(0, total, 4):
        word = 0
        for offset in range(4):
            idx = start + offset
            if idx < total:
                word |= ord(string[idx]) << (8 * offset)
        words.append(word)
    if includeLength:
        words.append(total)
    return words
def longArrayToString(data, includeLength):
    """Unpack 32-bit little-endian words back into a string.

    Inverse of stringToLongArray. When *includeLength* is true the last
    word is read as the declared character count; if it is inconsistent
    with the word count (outside [4n-3, 4n] for n data words), None is
    returned, otherwise the output is truncated to that length.
    """
    count = len(data)
    out_len = (count - 1) << 2
    if includeLength:
        declared = data[count - 1]
        if declared < out_len - 3 or declared > out_len:
            return None
        out_len = declared
    pieces = []
    for word in data:
        pieces.append(chr(word & 0xff))
        pieces.append(chr((word >> 8) & 0xff))
        pieces.append(chr((word >> 16) & 0xff))
        pieces.append(chr((word >> 24) & 0xff))
    text = "".join(pieces)
    return text[0:out_len] if includeLength else text
def Base32encrypt(string, key):
    """Encrypt *string* with *key* using an XXTEA-style block cipher.

    Port of 12306's client-side JS. Plaintext and key are packed into
    32-bit little-endian words (the plaintext carries a trailing length
    word); the key is zero-padded to at least 4 words. NOTE(review):
    canonical XXTEA uses delta 0x9E3779B9 -- this port uses 0x9E3779B8,
    presumably matching the site's JS; confirm before "fixing".
    """
    if (string == ""):
        return "";
    delta = 0x9E3779B8
    v = stringToLongArray(string, True)
    k = stringToLongArray(key, False)
    k_length = len(k)
    # Pad the key to the 4 words the mixing formula indexes (k[p & 3 ^ e]).
    if (k_length < 4):
        k.append(0)
        k.append(0)
        k.append(0)
        k_length = 4
    v_length = len(v)
    n = v_length - 1;
    z = v[n]
    y = v[0];
    # Round count: 6 + 52/(n+1), as in reference XXTEA.
    q = math.floor(6 + 52 / (n + 1))
    sum = 0
    while (0 < q ):
        q = q - 1
        sum = sum + delta & 0xffffffff;
        e = sum >> 2 & 3;
        for p in range(0, n): # (p = 0; p < n; p ++ ):
            y = v[p + 1];
            mx = (z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[p & 3 ^ e] ^ z);
            z = v[p] = v[p] + mx & 0xffffffff;
        # Final element wraps around to v[0] as its right neighbour.
        p = n
        y = v[0];
        mx = (z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[p & 3 ^ e] ^ z);
        z = v[n] = v[n] + mx & 0xffffffff;
    return longArrayToString(v, False);
def encode32(inputbuf):
    """Base64-style encode *inputbuf* (by code point) with the standard
    alphabet, '=' for padding.

    Port of the site's JS encoder. NOTE(review): the padding-flag logic
    looks like a mistranslation of JS bitwise operators:
    ``sing = sing or 0x1`` / ``sing or 0x2`` were presumably ``|=``, and
    ``if (sing and 0x1)`` / ``elif (sing and 0x2)`` presumably ``&``.
    As written any truthy ``sing`` takes the first branch, so inputs of
    length 2 (mod 3) get '==' where plain base64 would emit '='. Do not
    "fix" without confirming against the server-side decoder.
    """
    keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
    #inputbuf = escape(inputbuf);
    output = "";
    i = 0;
    sing = 0
    while (i < len(inputbuf)):
        # Take up to three input chars; a slice past the end is "" and
        # ord("") raises, flagging which tail bytes were missing.
        chr1 = ord(inputbuf[i:i+1])
        i = i + 1
        try:
            chr2 = ord(inputbuf[i:i+1])
            i = i + 1
        except:
            chr2 = 0
            sing = sing or 0x1
        try:
            chr3 = ord(inputbuf[i:i+1])
            i = i + 1
        except:
            chr3 = 0
            sing = sing or 0x2
        # Split 3x8 bits into 4x6-bit alphabet indices.
        enc1 = chr1 >> 2;
        enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
        enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
        enc4 = chr3 & 63;
        if (sing and 0x1):
            enc3 = enc4 = 64
        elif (sing and 0x2):
            enc4 = 64
        output = output + keyStr[enc1:enc1+1] + keyStr[enc2:enc2+1] + keyStr[enc3:enc3+1] + keyStr[enc4:enc4+1]
    return output
def bin216(s):
    """Hex-encode *s* code point by code point.

    Each character becomes the lowercase hex of its ordinal, zero-padded
    to at least two digits (code points above 0xff yield longer runs).
    """
    return "".join("%02x" % ord(ch) for ch in s)
def encrypt(text, key):
    """XXTEA-encrypt *text* with *key*, then hex-encode and apply the
    site's base64-style transport encoding."""
    return encode32(bin216(Base32encrypt(text, key)))
def unicodeStr(s):
    """Reproduce the JS escape()-style %uXXXX encoding 12306 expects in
    its cookies: non-ASCII chars become %uXXXX (uppercase hex), and the
    whole percent-quoted string is uppercased."""
    escaped = s.encode("unicode-escape").decode("utf8")
    quoted = urllib.parse.quote_plus(escaped).upper()
    return quoted.replace("%5CU", "%u")
| {
"repo_name": "qhgongzi/xilinTicketV2",
"path": "xxtea.py",
"copies": "1",
"size": "3382",
"license": "bsd-3-clause",
"hash": -3278317229925475000,
"line_mean": 25.2170542636,
"line_max": 111,
"alpha_frac": 0.4568302779,
"autogenerated": false,
"ratio": 2.948561464690497,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39053917425904966,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import pygame
import constants
from spritesheet_functions import SpriteSheet
class Player2(pygame.sprite.Sprite):
    """ This class represents the bar at the bottom that the player
    controls. """

    # -- Attributes
    # Set speed vector of player
    change_x = 0
    change_y = 0

    # Kept for backward compatibility; the real frame lists are created
    # per instance in __init__.  The original appended to these shared
    # class-level lists, so every Player2() duplicated all 7 frames into
    # the same lists for every instance.
    walking_frames_l = []
    walking_frames_r = []

    # What direction is the player facing?
    direction = "R"

    # List of sprites we can bump against
    level = None

    # (x, y, width, height) of each right-facing frame in p2.png,
    # in animation order.
    FRAME_RECTS = [
        (0, 0, 66, 90),
        (66, 0, 66, 90),
        (132, 0, 67, 90),
        (0, 93, 66, 90),
        (66, 93, 66, 90),
        (132, 93, 72, 90),
        (0, 186, 70, 90),
    ]

    # -- Methods
    def __init__(self):
        """ Constructor function """
        # Call the parent's constructor
        pygame.sprite.Sprite.__init__(self)

        # Instance-level lists: fixes the shared mutable class-attribute bug.
        self.walking_frames_r = []
        self.walking_frames_l = []

        sprite_sheet = SpriteSheet("p2.png")
        for x, y, width, height in self.FRAME_RECTS:
            image = sprite_sheet.get_image(x, y, width, height)
            self.walking_frames_r.append(image)
            # Left-facing frames are the right-facing ones mirrored.
            self.walking_frames_l.append(
                pygame.transform.flip(image, True, False))

        # Set the image the player starts with
        self.image = self.walking_frames_r[0]
        # Set a reference to the image rect.
        self.rect = self.image.get_rect()
class P2(pygame.sprite.Sprite):
    """ Platform the user can jump on """

    # (x, y, width, height) of each right-facing frame in p2.png,
    # in animation order.
    FRAME_RECTS = [
        (0, 0, 66, 90),
        (66, 0, 66, 90),
        (132, 0, 67, 90),
        (0, 93, 66, 90),
        (66, 93, 66, 90),
        (132, 93, 72, 90),
        (0, 186, 70, 90),
    ]

    def __init__(self):
        """ Constructor function """
        # Call the parent's constructor
        pygame.sprite.Sprite.__init__(self)

        # BUG FIX: the original appended to self.walking_frames_r /
        # self.walking_frames_l without ever defining them (no class
        # attribute, no prior assignment), so P2() — and p3(), which
        # inherits this __init__ — raised AttributeError immediately.
        self.walking_frames_r = []
        self.walking_frames_l = []

        sprite_sheet = SpriteSheet("p2.png")
        for x, y, width, height in self.FRAME_RECTS:
            image = sprite_sheet.get_image(x, y, width, height)
            self.walking_frames_r.append(image)
            # Left-facing frames are the right-facing ones mirrored.
            self.walking_frames_l.append(
                pygame.transform.flip(image, True, False))

        # Set the image the player starts with
        self.image = self.walking_frames_r[0]
        # Set a reference to the image rect.
        self.rect = self.image.get_rect()
class p3(P2):
    """ This is a fancier platform that can actually move. """

    # Speed vector of the platform, pixels per update.
    change_x = 0
    change_y = 0

    # Travel limits; the platform reverses direction when it crosses them.
    boundary_top = 0
    boundary_bottom = 0
    boundary_left = 0
    boundary_right = 0

    # Level this platform belongs to (provides world_shift for scrolling).
    level = None
    # Player sprite to shove out of the way on collision.
    player = None

    def update(self):
        """ Move the platform.
        If the player is in the way, it will shove the player
        out of the way. This does NOT handle what happens if a
        platform shoves a player into another object. Make sure
        moving platforms have clearance to push the player around
        or add code to handle what happens if they don't. """

        # Move left/right
        self.rect.x += self.change_x

        # See if we hit the player
        hit = pygame.sprite.collide_rect(self, self.player)
        if hit:
            # We did hit the player. Shove the player around and
            # assume he/she won't hit anything else.

            # Moving left: push the player's right edge to our left edge.
            if self.change_x < 0:
                self.player.rect.right = self.rect.left
            else:
                # Otherwise we are moving right: push the player's left
                # edge to our right edge.
                self.player.rect.left = self.rect.right

        # Move up/down
        self.rect.y += self.change_y

        # Check and see if we hit the player
        hit = pygame.sprite.collide_rect(self, self.player)
        if hit:
            # We did hit the player. Shove the player around and
            # assume he/she won't hit anything else.

            # Reset our position based on the top/bottom of the object.
            if self.change_y < 0:
                self.player.rect.bottom = self.rect.top
            else:
                self.player.rect.top = self.rect.bottom

        # Check the boundaries and see if we need to reverse
        # direction.
        if self.rect.bottom > self.boundary_bottom or self.rect.top < self.boundary_top:
            self.change_y *= -1

        # Horizontal boundaries are in world coordinates, so undo the
        # level's scroll offset before comparing.
        cur_pos = self.rect.x - self.level.world_shift
        if cur_pos < self.boundary_left or cur_pos > self.boundary_right:
            self.change_x *= -1
"repo_name": "saintdragon2/python-3-lecture-2015",
"path": "civil_mid_final/10조/x.py",
"copies": "1",
"size": "7441",
"license": "mit",
"hash": 263786467389729300,
"line_mean": 35.8415841584,
"line_max": 88,
"alpha_frac": 0.6071764548,
"autogenerated": false,
"ratio": 3.5705374280230324,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.96742002784164,
"avg_score": 0.0007027208813262736,
"num_lines": 202
} |
__author__ = 'Administrator'
import requests
import xlstr
import time
class Ticket:
    """One result row of a 12306 left-ticket query, plus booking details."""

    # -- ticket info (filled from the query response) --
    train_no = ''
    station_train_code = ''       # train code, e.g. K540
    from_station_telecode = ''
    from_station_name = ''
    to_station_telecode = ''
    to_station_name = ''
    yp_info = ''                  # opaque ticket-pool info string
    location_code = ''
    secret_str = ''
    start_train_date = ''         # travel date, e.g. 20140127

    # -- booking info --
    train_date = ''
    train_date_utc = ''
    seat_type = ''

    def __init__(self, ticket_obj, buy_type):
        """Populate fields from one leftTicket query entry.

        *ticket_obj* is a dict with 'queryLeftNewDTO' and 'secretStr'
        keys; *buy_type* is the seat-type code the user wants to buy.
        """
        dto = ticket_obj['queryLeftNewDTO']
        self.train_no = dto['train_no']
        self.from_station_telecode = dto['from_station_telecode']
        self.from_station_name = dto['from_station_name']
        self.to_station_telecode = dto['to_station_telecode']
        self.to_station_name = dto['to_station_name']
        self.yp_info = dto['yp_info']
        self.start_train_date = dto['start_train_date']
        self.location_code = dto['location_code']
        self.secret_str = ticket_obj['secretStr']
        self.station_train_code = dto['station_train_code']

        # Derive the two date formats the order endpoints expect.
        parsed = time.strptime(self.start_train_date, '%Y%m%d')
        self.train_date = time.strftime('%Y-%m-%d', parsed)
        self.train_date_utc = time.strftime('%a %b %d %H:%M:%S UTC+0800 %Y', parsed)
        self.seat_type = buy_type
# Seat-type codes used by the 12306 API mapped to their Chinese labels:
# M = first-class seat, O = second-class seat, 4 = soft sleeper,
# 3 = hard sleeper, 1 = hard seat.
SeatType={'M':'一等座','O':'二等座','4':'软卧','3':'硬卧','1':'硬座'}
"repo_name": "qhgongzi/xilinTicketV2",
"path": "ticket.py",
"copies": "1",
"size": "1652",
"license": "bsd-3-clause",
"hash": -6177668610600837000,
"line_mean": 34.8409090909,
"line_max": 91,
"alpha_frac": 0.6218274112,
"autogenerated": false,
"ratio": 2.8654545454545453,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39872819566545453,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import requests
import xlstr
import urllib.parse
import urllib
import xxtea
import random
import time
class C12306:
    """Client for the kyfw.12306.cn train-ticket site.

    Keeps one requests session plus the per-session anti-bot "dynamic
    key" form fields and submit tokens the site requires, and drives the
    full booking flow:

        login -> load_search_page -> search_ticket -> submit_order
              -> check_order -> confirm_order
    """
    username = ''
    password = ''
    domain = 'kyfw.12306.cn'  # request domain (the address actually connected to)
    host='kyfw.12306.cn'  # value sent as the Host header
    # NOTE(review): class-level session — shared by all C12306 instances.
    http = requests.session()
    leftTicketUrl="leftTicket/query"  # overwritten by load_search_page()
    stationCode = {}  # station name -> telegraph code
    loginDynamicKey=''  # name of the anti-bot form field scraped from dynamicJs
    loginDynamicVal=''  # encrypted value for that field

    def __init__(self,domain=''):
        # Optionally override the connection domain (e.g. a direct IP).
        if domain!='':
            self.domain=domain
        self.load_login_page()
        self.load_station_code()

    def load_login_page(self):
        """Fetch the login page and scrape the dynamic anti-bot key/value.

        :raise C12306Error: when the init page does not contain the
            expected dynamicJs reference.
        """
        headers = {'X-Requested-With': 'XMLHttpRequest','host':self.host,"Referer":"https://kyfw.12306.cn/otn/login/init"}
        self.http.get("https://" + self.domain + "/otn/", verify=False,headers=headers)
        res = self.http.get('https://'+self.domain+'/otn/login/init', verify=False,headers=headers)
        assert isinstance(res, requests.Response)
        if not 'src=\"/otn/dynamicJs/' in res.text:
            raise C12306Error('初始化页面错误')
        # The dynamic JS embeds the per-session key; its encrypted form is
        # sent back with the login POST.
        dynamic_js_url = xlstr.substr(res.text, "src=\"/otn/dynamicJs/", "\"")
        ret=self.http.get("https://"+self.domain+"/otn/dynamicJs/" + dynamic_js_url, verify=False,headers=headers).text
        self.loginDynamicKey=xlstr.substr(ret,"gc(){var key='","'")
        self.loginDynamicVal=(xxtea.encrypt("1111",self.loginDynamicKey))
        # Hidden monitoring URL the page would hit via $(document).ready;
        # request it too so the session looks like a real browser.
        ready_str=xlstr.substr(ret,"$(document).ready(",")};")
        if ready_str.find("jq({url")>0 :
            checkHelperUrl=xlstr.substr(ready_str,"jq({url :'","'")
            self.http.get("https://"+self.domain+checkHelperUrl,verify=False,headers=headers)

    def load_station_code(self):
        """Load station telecodes from 12306.cn.

        Builds the name -> telegraph-code mapping used by the other
        requests.

        :raise C12306Error: when the station list cannot be fetched.
        """
        headers = {'X-Requested-With': 'XMLHttpRequest','host':self.host,"Referer":"https://kyfw.12306.cn/otn/login/init"}
        res = requests.get('https://'+self.domain+'/otn/resources/js/framework/station_name.js', verify=False,headers=headers)
        if res.status_code != 200:
            raise C12306Error('加载车站信息错误,请重开!')
        # The JS file is one quoted string of '@'-separated records;
        # each record is '|'-separated with name at [1], telecode at [2].
        stationStrs = xlstr.substr(res.text, "'", "'")
        stationList = stationStrs.split('@')
        stationDict = {}
        for stationStr in stationList:
            station = stationStr.split("|")
            if len(station) > 3:
                stationDict[station[1]] = station[2]
        self.stationCode = stationDict

    def login(self, username, password, auth_code):
        """Log in with the given credentials and captcha answer.

        :raise C12306Error: when the login response lacks the success marker.
        """
        self.username = username
        self.password = password
        headers = {'X-Requested-With': 'XMLHttpRequest','host':self.host,"Referer":"https://kyfw.12306.cn/otn/login/init"}
        # Validate the captcha first, as the browser does.
        checkData={"randCode":auth_code,"rand":"sjrand","randCode_validate":""}
        self.http.post("https://" + self.domain + "/otn/passcodeNew/checkRandCodeAnsyn",checkData,verify=False,headers=headers)
        time.sleep(1)
        data = {'loginUserDTO.user_name': self.username, 'userDTO.password': self.password, 'randCode': auth_code\
            ,"randCode_validate":"","myversion":"undefined"}
        # Attach the per-session dynamic anti-bot field.
        data[self.loginDynamicKey]=self.loginDynamicVal
        res = self.http.post("https://" + self.domain + "/otn/login/loginAysnSuggest", data, verify=False,
                             headers=headers)
        if not 'loginCheck":"Y"},"' in res.text:
            print(res.text)
            raise C12306Error('登录失败:' + ''.join(res.json()['messages']))
        return True

    def auth_code_img(self, module='passenger'):
        """Fetch the captcha image bytes for *module* ('passenger' or 'login')."""
        if module=='passenger':
            url="https://" + self.domain + "/otn/passcodeNew/getPassCodeNew.do?module=passenger&rand=randp"
        else:
            url = "https://" + self.domain + "/otn/passcodeNew/getPassCodeNew?module=" + module + "&rand=sjrand"
        res = self.http.get(url, verify=False,headers={"host":self.host})
        assert isinstance(res, requests.Response)
        return res.content

    def load_search_page(self):
        """Load the left-ticket page and scrape its query URL and dynamic key."""
        headers={'Referer':'https://kyfw.12306.cn/otn/login/init',"host":self.host}
        leftText=self.http.get("https://"+self.domain+"/otn/leftTicket/init",verify=False,headers=headers).text
        # The actual query endpoint is configured in the page.
        self.leftTicketUrl=xlstr.substr(leftText,"var CLeftTicketUrl = '","'")
        dynamic_js_url = xlstr.substr(leftText, "src=\"/otn/dynamicJs/", "\"")
        ret=self.http.get("https://"+self.domain+"/otn/dynamicJs/" + dynamic_js_url, verify=False,headers=headers).text
        # Hidden monitoring URL — hit it to mimic the browser.
        ready_str=xlstr.substr(ret,"$(document).ready(",")};")
        if ready_str.find("jq({url")>0 :
            checkHelperUrl=xlstr.substr(ready_str,"jq({url :'","'")
            self.http.get("https://"+self.domain+checkHelperUrl,verify=False,headers=headers)
        # Dynamic anti-bot field used later by submit_order.
        self.searchDynamicKey=xlstr.substr(ret,"gc(){var key='","'")
        self.searchDynamicVal=urllib.parse.quote_plus(xxtea.encrypt("1111",self.searchDynamicKey))
        self.auth_code_img('login')

    def search_ticket(self, fromStation, toStation, date):
        """Query direct trains between two station names on *date* (YYYY-MM-DD).

        Returns the 'data' list of the JSON response.

        :raise C12306Error: on query failure or when no direct train exists.
        """
        headers={'Referer':'https://kyfw.12306.cn/otn/leftTicket/init',"host":self.host\
            ,"X-Requested-With":"XMLHttpRequest"}
        # The site expects the last search to be mirrored in these cookies.
        jc_fromStation=xxtea.unicodeStr(fromStation+","+self.stationCode[fromStation])
        jc_toStation=xxtea.unicodeStr(toStation+","+self.stationCode[toStation])
        self.http.cookies.set("_jc_save_fromStation",jc_fromStation)
        self.http.cookies.set("_jc_save_toStation",jc_toStation)
        self.http.cookies.set('_jc_save_fromDate',date)
        self.http.cookies.set('_jc_save_toDate',"2014-05-01")
        self.http.cookies.set('_jc_save_wfdc_flag','dc')
        t=str(random.random())  # NOTE(review): computed but never used
        dataUrl='?leftTicketDTO.train_date='+date\
            +"&leftTicketDTO.from_station="+self.stationCode[fromStation]+"&leftTicketDTO.to_station="+\
            self.stationCode[toStation]+"&purpose_codes=ADULT"
        # The browser hits the /log endpoint before the real query; mimic it.
        logUrl='https://' + self.domain + '/otn/leftTicket/log'+dataUrl
        url='https://' + self.domain + '/otn/'+self.leftTicketUrl+dataUrl
        self.http.get(logUrl,verify=False,headers=headers)
        res = self.http.get(url,verify=False,headers=headers)
        ticketInfo=res.json()
        if ticketInfo['status']!=True or ticketInfo['messages']!=[] :
            print(ticketInfo)
            raise C12306Error('查询错误:'+''.join(ticketInfo['messages']))
        if len(ticketInfo['data'])<=0:
            raise C12306Error('查询错误:'+'没有找到直达车次,请重新查询!')
        return ticketInfo['data']

    def submit_order(self,ticket):
        """Submit the order request for *ticket* and load the confirm page.

        Stores the repeat-submit token, key_check, leftTicketStr,
        train_location and the confirm page's dynamic anti-bot key for
        check_order/confirm_order.

        :param ticket: Ticket
        :return: :raise C12306Error:
        """
        data='secretStr='+ticket.secret_str+'&train_date='+ticket.train_date+'&back_train_date='+ticket.train_date+\
            '&tour_flag=dc&purpose_codes=ADULT&query_from_station_name='+ticket.from_station_name+\
            '&query_to_station_name='+ticket.to_station_name+'&undefined&'+self.searchDynamicKey+"="+self.searchDynamicVal
        headers={'Referer':'https://kyfw.12306.cn/otn/leftTicket/init','X-Requested-With':'XMLHttpRequest'\
            ,'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',"host":self.host}
        data=data.encode()
        self.http.post('https://'+self.domain+'/otn/login/checkUser',{"_json_att":""},verify=False,headers=headers)
        res=self.http.post('https://'+self.domain+'/otn/leftTicket/submitOrderRequest',data,verify=False,headers=headers)
        orderInfo=res.json()
        if orderInfo['status']!=True:
            raise C12306Error('提交订单错误:'+''.join(orderInfo['messages']))
        res=self.http.post('https://'+self.domain+'/otn/confirmPassenger/initDc',{'_json_att':''},verify=False,headers=headers)
        pageText=res.text
        # Tokens embedded in the confirm page, required by later steps.
        self.Token=xlstr.substr(pageText,"globalRepeatSubmitToken = '","'")
        self.keyCheck=xlstr.substr(pageText,"'key_check_isChange':'","'")
        self.leftTicketStr=xlstr.substr(pageText,"leftTicketStr':'","'")
        self.trainLocation=xlstr.substr(pageText,"train_location':'","'")
        dynamic_js_url = xlstr.substr(res.text, "src=\"/otn/dynamicJs/", "\"");
        dynamic_js = self.http.get("https://"+self.domain +"/otn/dynamicJs/"+ dynamic_js_url, verify=False,headers=headers)
        # Hidden monitoring URL — hit it to mimic the browser.
        ready_str=xlstr.substr(dynamic_js.text,"$(document).ready(",")};")
        if ready_str.find("jq({url")>0 :
            checkHelperUrl=xlstr.substr(ready_str,"jq({url :'","'")
            self.http.get("https://"+self.domain+checkHelperUrl,verify=False,headers=headers)
        self.dynamicKey=xlstr.substr(dynamic_js.text,"gc(){var key='","'")
        self.dynamicVal=xxtea.encrypt("1111",self.dynamicKey)
        self.dynamicVal=urllib.parse.quote_plus(self.dynamicVal)
        # The repeat-submit token is a 32-char hex string when valid.
        if len(self.Token)!=32 :
            raise C12306Error('预定页面获取失败!')
        return True

    def check_order(self,Ticket,passengerList,randCode):
        """Validate the captcha and order info, then query the order queue.

        Returns False when the captcha (or order) must be retried; True
        when the order passed checks and self.passengerStr is ready for
        confirm_order.

        :raise C12306Error: on empty passenger list or server-side errors.
        """
        if len(passengerList)<1 :
            raise C12306Error('没有勾选乘客,无法购票!')
        headers={'Referer':'https://kyfw.12306.cn/otn/confirmPassenger/initDc','X-Requested-With':'XMLHttpRequest'\
            ,'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',"host":self.host}
        # Validate the order-page captcha first.
        checkData={"randCode":randCode,"rand":"randp","REPEAT_SUBMIT_TOKEN":self.Token}
        ret=self.http.post("https://" + self.domain + "/otn/passcodeNew/checkRandCodeAnsyn",checkData,verify=False,headers=headers)
        ret=ret.json()
        time.sleep(1)
        if(ret['status']!=True or ret['data']['result']!='1'):
            print(ret)
            return False
        # Build the passengerTicketStr / oldPassengerStr form fields the
        # site expects ('_'-joined records of ','-joined fields).
        ticketInfo=[]
        oldPassengerInfo=[]
        for passenger in passengerList:
            ticketStr=Ticket.seat_type+",0,"+passenger['ticketType']+","+passenger['name']+',1,'+passenger['idcard']+','+\
                passenger['telephone']+',N'
            ticketInfo.append(ticketStr)
            oldPStr=passenger['name']+',1,'+passenger['idcard']+','+passenger['ticketType']
            oldPassengerInfo.append(oldPStr)
        ticketStrs=urllib.parse.quote_plus('_'.join(ticketInfo))
        oldPassengerStrs=urllib.parse.quote_plus('_'.join(oldPassengerInfo))
        pstr="cancel_flag=2&bed_level_order_num=000000000000000000000000000000&passengerTicketStr="+ticketStrs\
            +"&oldPassengerStr="+oldPassengerStrs+"&tour_flag=dc&randCode="+randCode+"&"+self.dynamicKey+"="+self.dynamicVal+\
            "&_json_att=&REPEAT_SUBMIT_TOKEN="+self.Token
        pstr=pstr.encode()
        res=self.http.post('https://'+self.domain+'/otn/confirmPassenger/checkOrderInfo',pstr,verify=False,headers=headers)
        orderInfo=res.json()
        if orderInfo['status']!=True:
            raise C12306Error('提交订单错误:'+''.join(orderInfo['messages']))
        # A captcha complaint means the caller should retry with a new code.
        if orderInfo['data']['submitStatus'] ==False and '验证码' in orderInfo['data']['errMsg']:
            return False
        if orderInfo['data']['submitStatus'] ==True:
            # Query the ticket queue to see whether the order can proceed.
            pstr="train_date="+urllib.parse.quote_plus(Ticket.train_date_utc)+"&train_no="+Ticket.train_no+"&stationTrainCode="+\
                Ticket.station_train_code+"&seatType="+Ticket.seat_type+"&fromStationTelecode="+\
                Ticket.from_station_telecode+"&toStationTelecode="+Ticket.to_station_telecode+"&leftTicket="+\
                Ticket.yp_info+"&purpose_codes=00&_json_att=&REPEAT_SUBMIT_TOKEN="+self.Token
            pstr=pstr.encode()
            res=self.http.post('https://'+self.domain+'/otn/confirmPassenger/getQueueCount',pstr,verify=False,headers=headers).json()
            if res['status']!=True:
                raise C12306Error('查询排队队列错误:'+''.join(res['messages']))
            if res['data']['op_2']!='false':
                raise C12306Error('排队人数已满,取消操作!')
            else:
                # Keep the passenger fields for confirm_order.
                self.passengerStr="passengerTicketStr="+ticketStrs+"&oldPassengerStr="+oldPassengerStrs
                return True
        else:
            raise C12306Error('订单错误:'+''.join(orderInfo['data']['errMsg']))

    def confirm_order(self,randCode):
        """Final confirm step; retries itself when the server asks to retry.

        :raise C12306Error: on any other confirmation failure.
        """
        pstr=self.passengerStr+"&randCode="+randCode+"&purpose_codes=00"+\
            "&key_check_isChange="+self.keyCheck+"&leftTicketStr="+self.leftTicketStr+\
            "&train_location="+self.trainLocation+"&_json_att=&REPEAT_SUBMIT_TOKEN="+self.Token
        pstr=pstr.encode()
        headers={'Referer':'https://kyfw.12306.cn/otn/confirmPassenger/initDc','X-Requested-With':'XMLHttpRequest'\
            ,'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',"host":self.host}
        res=self.http.post("https://"+self.domain+"/otn/confirmPassenger/confirmSingleForQueue",pstr,verify=False,headers=headers).json()
        if res['status']==True :
            return True
        elif '请重试' in res['messages']:
            return self.confirm_order(randCode)
        else:
            raise C12306Error('确认订单出现错误:'+res['messages'])

    def keep_online(self):
        """Ping an authenticated page so the session does not expire."""
        print("keep online")
        # NOTE(review): this headers dict is built but the request below
        # passes its own minimal headers — confirm whether that is intended.
        headers={'Referer':'https://kyfw.12306.cn/otn/leftTicket/init','X-Requested-With':'XMLHttpRequest'\
            ,'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',"host":self.host}
        self.http.get("https://"+self.domain+"/otn/queryOrder/initNoComplete",verify=False,headers={"host":self.host})
class C12306Error(Exception):
    """Error raised for any failure while talking to the 12306 site."""

    def __init__(self, val):
        # Human-readable error message (often Chinese text from the server).
        self.value = val

    def __str__(self):
        # BUG FIX: the original lacked `return`, so __str__ returned None
        # and every str(exc) / print(exc) raised
        # "TypeError: __str__ returned non-string".
        return str(self.value)
| {
"repo_name": "qhgongzi/xilinTicketV2",
"path": "c12306.py",
"copies": "1",
"size": "13728",
"license": "bsd-3-clause",
"hash": -5554569168633279000,
"line_mean": 43.2376237624,
"line_max": 137,
"alpha_frac": 0.6261563712,
"autogenerated": false,
"ratio": 3.290940338816597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9330107690138727,
"avg_score": 0.017397803975573934,
"num_lines": 303
} |
__author__ = 'Administrator'
import scrapy
from scrapy.spider import BaseSpider
from tutorial.items import NjuptItem
import logging
class njuptSpider(BaseSpider):
    """Spider that crawls the NJUPT news list pages.

    BUG FIX: the original ``parse`` nested the per-row extraction loop
    inside the pagination loop, so every item on page 1 was extracted
    and yielded once per pagination iteration (page_num - 1 = 407
    duplicates each).  Extraction is factored into ``_extract_items``
    and performed exactly once per response; ``parse_news`` shares it.
    """
    name = "njupt"
    allowed_domains = ["njupt.edu.cn"]
    start_urls = [
        "http://news.njupt.edu.cn/s/222/t/1100/p/1/c/6866/i/1/list.htm",
    ]

    # News rows per list page.
    NEWS_PAGE_NUM = 14
    # Total number of list pages to crawl.
    PAGE_NUM = 408

    def _extract_items(self, response):
        """Yield one NjuptItem per news row found on *response*."""
        for j in range(1, self.NEWS_PAGE_NUM + 1):
            item = NjuptItem()
            # One XPath union pulls url, title and date for row j; the
            # three nodes come back in document order.
            item['news_url'], item['news_title'], item['news_date'] = response.xpath(
                "//div[@id='newslist']/table[1]/tr[" + str(j) + "]//a/font/text()"
                "|//div[@id='newslist']/table[1]/tr[" + str(j) + "]//td[@class='postTime']/text()"
                "|//div[@id='newslist']/table[1]/tr[" + str(j) + "]//a/@href").extract()
            yield item

    def parse(self, response):
        """Extract page 1 once, then schedule pages 2..PAGE_NUM."""
        if response.status == 200:
            for item in self._extract_items(response):
                yield item
            for i in range(2, self.PAGE_NUM + 1):
                next_page_url = "http://news.njupt.edu.cn/s/222/t/1100/p/1/c/6866/i/" + str(i) + "/list.htm"
                yield scrapy.Request(next_page_url, callback=self.parse_news)

    def parse_news(self, response):
        """Extract the items of one follow-up list page."""
        if response.status == 200:
            for item in self._extract_items(response):
                yield item
"repo_name": "xiaokaizh/PythonLearn",
"path": "tutorial/tutorial/spiders/schoolnews.py",
"copies": "1",
"size": "1705",
"license": "apache-2.0",
"hash": -563866569022816830,
"line_mean": 37.7727272727,
"line_max": 104,
"alpha_frac": 0.5184750733,
"autogenerated": false,
"ratio": 3.2109227871939736,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.91934459837377,
"avg_score": 0.007190375351254691,
"num_lines": 44
} |
__author__ = 'Administrator'
import traceback
import json
import time
from nova import exception
from nova.openstack.common import loopingcall
from nova.openstack.common import log as logging
from hwcloud.database_manager import DatabaseManager
import nova.virt.hws.wormhole_business
from nova.virt.hws.wormhole_business import WormHoleBusinessHWS
from nova.virt.hws.driver import logger_helper
LOG = logging.getLogger(__name__)
from nova.virt.hws.driver import HwsComputeDriver, CONF
# Image container_format value that marks an image as a "hybrid VM"
# (provider VM + docker container) image; selects the hyper-VM spawn path.
CONTAINER_FORMAT_HYBRID_VM = 'hybridvm'
class HyperVMDriver(HwsComputeDriver):
    """Compute driver extending HwsComputeDriver with hybrid-VM support.

    Instances whose image container_format is 'hybridvm' are spawned as
    a provider (HWS) VM plus a docker container driven through the
    wormhole service; everything else is delegated to the base driver.
    """

    def __init__(self, virtapi):
        super(HyperVMDriver, self).__init__(virtapi)

    def _get_container_format_from_instance(self, instance):
        # container_format is stashed in the instance system metadata;
        # returns None when the key is absent.
        return instance.system_metadata.get('image_container_format')
def spawn(self, context, instance, image_meta, injected_files,
          admin_password, network_info=None, block_device_info=None):
    """Dispatch spawning: hybrid-VM images take the hyper-VM path,
    everything else falls through to the base driver."""
    container_format = self._get_container_format_from_instance(instance)
    LOG.debug('container_format: %s' % container_format)
    if container_format == CONTAINER_FORMAT_HYBRID_VM:
        self._spawn_hyper_vm(context, instance, image_meta, injected_files,
                             admin_password, network_info, block_device_info)
    else:
        super(HyperVMDriver, self).spawn(context, instance, image_meta, injected_files,
                                         admin_password, network_info, block_device_info)
def _spawn_hyper_vm(self, context, instance, image_meta, injected_files,
                    admin_password, network_info=None, block_device_info=None):
    """Spawn a hybrid VM, either boot-from-volume or boot-from-image.

    Boot-from-volume (no image_ref, bdms present): if the cascaded
    volume already exists, its data may have diverged from the source
    image, so the server is created from a backup of that volume;
    otherwise it is created directly from the volume's source image.
    """
    LOG.debug('start to _spawn_hyper_vm')
    bdms = block_device_info.get('block_device_mapping', [])
    self._binding_host(context, network_info, instance.uuid)
    if not instance.image_ref and len(bdms) > 0:
        volume_ids, bootable_volume_id = self._get_volume_ids_from_bdms(bdms)
        if bootable_volume_id:
            db_manager = DatabaseManager()
            cascaded_volume_id = db_manager.get_cascaded_volume_id(bootable_volume_id)
            # if cascaded volume already been created, then the data maybe
            # changed, so it can't be created from the image.
            if cascaded_volume_id:
                LOG.info('Cascaded volume exist, need to transfer to image then create server from new image')
                cascaded_backup_id = self.get_cascaded_volume_backup(bootable_volume_id)
                # An existing backup id means a previous attempt may have
                # failed part-way; resume with that backup.
                if cascaded_backup_id:
                    self.create_server_from_backup(cascaded_backup_id, bootable_volume_id, instance)
                else:
                    cascaded_backup_id = self.create_backup_from_volume(self.project, bootable_volume_id, cascaded_volume_id)
                    self.create_server_from_backup(cascaded_backup_id, bootable_volume_id, instance)
            # If the cascaded volume does not exist, image and volume data
            # are identical, so the server can be created from the image.
            else:
                LOG.info('Cascaded volume not exist, create server from image directly')
                image_id = self._get_volume_source_image_id(context, bootable_volume_id)
                instance.image_ref = image_id
                self._spawn_from_image(context, instance, image_meta, injected_files,
                                       admin_password, network_info, block_device_info)
        else:
            raise Exception('No bootable volume for created server')
    else:
        self._spawn_from_image_for_hyper_vm(context, instance, image_meta, injected_files,
                                            admin_password, network_info, block_device_info)
    self._binding_host(context, network_info, instance.uuid)
def _spawn_from_image_for_hyper_vm(self, context, instance, image_meta, injected_files,
                                   admin_password, network_info=None, block_device_info=None):
    """Create the provider (HWS) server for a hybrid VM and wait for it.

    Submits the create-server job, polls it via a looping call until it
    succeeds or fails, records the id mappings, then creates the hyper
    service container on the new server.
    """
    LOG.debug('start to _spawn_from_image_for_hyper_vm')
    flavor = self._get_cascaded_flavor_id(instance)
    image_id = self._get_cascaded_image_id(instance)
    server_name = self._get_display_name(instance)
    vpc_id = CONF.hws.vpc_id
    subnet_id_list = self._get_subnets()
    project_id = CONF.hws.project_id
    root_volume_type = "SATA"
    az = CONF.hws.resource_region
    personality_path, file_content = self._get_personality_data(instance)
    sg_list = self._get_sg_list()
    LOG.debug('security_group: %s' % sg_list)
    try:
        created_job = self.hws_client.ecs.create_server(project_id, image_id, flavor,
                                                        server_name, vpc_id, subnet_id_list, root_volume_type,
                                                        availability_zone=az,
                                                        personality_path=personality_path,
                                                        personality_contents=file_content,
                                                        adminPass='Huawei@CLOUD8!',
                                                        security_groups=sg_list)
        job_status = created_job["status"]
        # status is the HTTP-style status of the job submission itself.
        if job_status != 200:
            job_info = json.dumps(created_job)
            error_info = 'HWS Create Server Error, EXCEPTION: %s' % created_job
            LOG.error(error_info)
            raise Exception(error_info)
    except Exception:
        raise exception.VirtualInterfaceCreateException(traceback.format_exc())

    job_id = created_job['body']['job_id']

    def _wait_for_boot():
        """Called at an interval until the VM is running."""
        job_current_info = self.hws_client.ecs.get_job_detail(project_id, job_id)
        if job_current_info and job_current_info['status'] == 200:
            job_status_ac = job_current_info['body']['status']
            if job_status_ac == 'SUCCESS':
                server_id = job_current_info['body']['entities']['sub_jobs'][0]["entities"]['server_id']
                LOG.info('Add hws server id: %s' % server_id)
                if server_id:
                    LOG.info('HWS add server id mapping, cascading id: %s, cascaded id: %s' %
                             (instance.uuid, server_id))
                    # Persist cascading-uuid -> cascaded-id/name mappings.
                    db_manager = DatabaseManager()
                    db_manager.add_server_id_mapping(instance.uuid, server_id)
                    db_manager.add_server_id_name_mapping(instance.uuid, server_name)
                else:
                    error_info = 'No server id found for cascading id: %s, server: %s' % (instance.uuid, server_name)
                    LOG.error(error_info)
                    raise Exception('HWS Create Server Error, EXCEPTION: %s' % error_info)
                # Stop the polling loop: the server is up.
                raise loopingcall.LoopingCallDone()
            elif job_status_ac == 'FAIL':
                error_info = json.dumps(job_current_info)
                LOG.error('HWS Create Server Error, EXCEPTION: %s' % error_info)
                raise Exception(error_info)
            elif job_status_ac == "RUNNING":
                LOG.debug('Job for creating server: %s is still RUNNING.' % server_name)
                pass
            elif job_status_ac == "INIT":
                LOG.debug('JOB for creating server: %s is INIT' % server_name)
                pass
            else:
                LOG.debug('JOB status is %s' % job_status_ac)
                pass
        elif job_current_info and job_current_info['status'] == 'error':
            try:
                # NOTE(review): errors from the java layer are swallowed
                # here and polling continues — confirm this is intended.
                self._deal_java_error(job_current_info)
            except Exception, e:
                pass
        elif not job_current_info:
            pass
        else:
            error_info = json.dumps(job_current_info)
            # log.error('HWS Create Server Error, EXCEPTION: %s' % error_info)
            raise Exception(error_info)

    # Poll the job every 5 seconds until LoopingCallDone or an exception.
    timer = loopingcall.FixedIntervalLoopingCall(_wait_for_boot)
    timer.start(interval=5).wait()
    db_m = DatabaseManager()
    provider_server_id = db_m.get_cascaded_server_id(instance.uuid)
    provider_server = self.hws_client.ecs.get_detail(self.project, provider_server_id)
    self._create_hyper_service_container(context, instance, provider_server,
                                         network_info, block_device_info,
                                         image_meta['name'], injected_files, image_meta,admin_password)
def _create_hyper_service_container(self, context, instance, provider_server,
                                    network_info, block_device_info,
                                    image_name, injected_files, image_meta, admin_password):
    """Create and start the docker container on the provider server.

    Talks to the wormhole (hyper) service running inside the provider
    VM: checks it is up, injects the config file, creates the container
    from the image, then starts it.  Raises on any step failure.
    """
    LOG.debug('instance: %s' % instance)
    LOG.debug('provider_server: %s' % provider_server)
    LOG.debug('network_info: %s' % network_info)
    LOG.debug('block_device_info: %s' % block_device_info)
    LOG.debug('image_name: %s' % image_name)
    LOG.debug('injected_files: %s' % injected_files)
    # Mark the instance so later driver calls know it is a hybrid VM.
    instance.metadata['is_hybrid_vm'] = True
    instance.save()
    # update port bind host
    LOG.debug('Start to binding host')
    self._binding_host(context, network_info, instance.uuid)
    port = CONF.hws.hybrid_service_port
    wormhole_business_hws = WormHoleBusinessHWS(provider_server, self.hws_client, port)
    # get_version doubles as a liveness probe for the hyper service.
    LOG.debug('start to get version')
    try:
        docker_version = wormhole_business_hws.get_version()
    except Exception, e:
        error_info = 'docker server is not up, create docker app failed, exception: %s' %\
            traceback.format_exc(e)
        raise Exception(error_info)
    try:
        LOG.debug('Start to inject file')
        file_data = self._get_inject_file_data(instance)
        inject_result = wormhole_business_hws.inject_file(CONF.hws.personality_path, file_data=file_data)
        LOG.debug('inject_file result: %s' % inject_result)
    except Exception, e:
        error_info = 'inject file failed, exception: %s' % traceback.format_exc(e)
        LOG.error(error_info)
        raise Exception(error_info)
    # NOTE(review): fixed 20s sleep before container creation — presumably
    # waiting for the injected config to take effect; confirm necessity.
    import time;time.sleep(20)
    try:
        image_uuid = self._get_image_id_from_meta(image_meta)
        LOG.debug('Start to create container by using image: %s' % image_name)
        created_container_task = wormhole_business_hws.create_container(image_name, image_uuid, injected_files,
                                                                       admin_password, network_info,
                                                                       block_device_info)
        LOG.debug('created_container: %s' % created_container_task)
        # Container creation is asynchronous; block until the task ends.
        wormhole_business_hws.wait_for_task_finish(created_container_task)
    except Exception, e:
        e_info = 'create container failed, exception: %s' % traceback.format_exc(e)
        LOG.error(e_info)
        raise Exception(e_info)
    try:
        LOG.info('network_info: %s' % network_info)
        LOG.info('block device info: %s' % block_device_info)
        LOG.debug('Star to start container.')
        started_container_task = wormhole_business_hws.start_container(network_info, block_device_info)
        LOG.debug('start container: %s' % started_container_task)
    except Exception, e:
        LOG.error('start container failed:%s' % traceback.format_exc(e))
        raise Exception('start container failed:%s' % traceback.format_exc(e))
    LOG.debug('Start to binding host')
    self._binding_host(context, network_info, instance.uuid)
@logger_helper()
def _get_image_id_from_meta(self, image_meta):
    """Extract the glance image id from *image_meta*.

    The key differs by code path: 'id' when booting from an image,
    'image_id' when attaching, and properties['image_id'] when booting
    from a volume.  Returns None when none of them is present.
    """
    for key in ('id', 'image_id'):
        if key in image_meta:
            return image_meta[key]
    if 'properties' in image_meta:
        return image_meta['properties']['image_id']
    return None
def _create_volume(self, volume_name, project_id, size, volume_type, availability_zone):
    """Create a provider volume and wait for its job via _deal_with_job."""
    job_info = self.hws_client.evs.create_volume(project_id, availability_zone,
                                                 size, volume_type, name=volume_name)
    self._deal_with_job(job_info, project_id)

@logger_helper()
def _get_inject_file_data(self, instance):
    """Build the config-file contents injected into the hybrid VM.

    The file carries the rabbit credentials/host plus the instance uuid
    and tunnel settings the in-VM agent needs to phone home.

    :raise ValueError: when CONF.hws.rabbit_host_ip is not configured.
    """
    rabbit_host = CONF.hws.rabbit_host_ip
    if not rabbit_host:
        raise ValueError('rabbit host is None' +
                         ' please config it in /etc/nova/nova-compute.conf, ' +
                         'hypernode_api section, my_ip option')
    LOG.info('rabbit_host: %s' % rabbit_host)
    LOG.info('host: %s' % instance.uuid)
    file_data = 'rabbit_userid=%s\nrabbit_password=%s\nrabbit_host=%s\n' % \
        (CONF.rabbit_userid, CONF.rabbit_password, rabbit_host)
    file_data += 'host=%s\ntunnel_cidr=%s\nroute_gw=%s\n' % \
        (instance.uuid, CONF.hws.tunnel_cidr,CONF.hws.route_gw)
    LOG.info('end to composite user data: %s' % file_data)
    return file_data
def power_off(self, instance, timeout=0, retry_interval=0):
LOG.debug('instance: %s' % instance)
container_format = self._get_container_format_from_instance(instance)
LOG.debug('container_format: %s' % container_format)
if container_format == CONTAINER_FORMAT_HYBRID_VM:
self._power_off_for_hybrid_vm(instance, timeout, retry_interval)
else:
super(HyperVMDriver, self).power_off(instance, timeout, retry_interval)
@logger_helper()
def _power_off_for_hybrid_vm(self, instance, timeout=0, retry_interval=0):
project_id = CONF.hws.project_id
provider_server_id = self._get_cascaded_server_id(instance)
provider_server = self.hws_client.ecs.get_detail(self.project, provider_server_id)
if provider_server_id and self._is_hws_server_exist(provider_server):
port = CONF.hws.hybrid_service_port
wormhole_business_hws = WormHoleBusinessHWS(provider_server, self.hws_client, port)
try:
wormhole_business_hws.get_version()
except Exception, e:
LOG.debug('hyper service is not online, stop base vm directlly.')
LOG.debug('start to stop server')
stop_result = self.hws_client.ecs.stop_server(project_id, provider_server_id)
self._deal_java_error(stop_result)
self._wait_for_hws_server_in_SHUTOFF(provider_server_id)
LOG.debug('start to stop container')
stop_result = wormhole_business_hws.stop_container()
time.sleep(0.5)
LOG.debug('start to stop server')
stop_result = self.hws_client.ecs.stop_server(project_id, provider_server_id)
self._deal_java_error(stop_result)
self._wait_for_hws_server_in_SHUTOFF(provider_server_id)
LOG.info('Stop Server: %s' % instance.display_name)
else:
error_info = 'cascaded server id is not exist for cascading server: %s.' % instance.display_name
LOG.error(error_info)
raise exception.NovaException(error_info)
@logger_helper()
def _wait_for_hws_server_in_ACTIVE(self, hws_server_id):
return self._wait_for_hws_server_in_specified_status(hws_server_id, ['ACTIVE'], ['ERROR'], 2, 60)
@logger_helper()
def _wait_for_hws_server_in_SHUTOFF(self, hws_server_id):
return self._wait_for_hws_server_in_specified_status(hws_server_id, ['SHUTOFF'], ['ERROR'], 2, 60)
@logger_helper()
def _wait_for_hws_server_in_specified_status(self, hws_server_id, status_list, error_status_list, sleep_interval,
loop_times):
is_in_specified_status = False
for i in range(loop_times):
hws_server = self.hws_client.ecs.get_detail(self.project, hws_server_id)
if self._is_hws_server_exist(hws_server):
hws_server_status = hws_server['body']['server']['status']
if hws_server_status in status_list:
is_in_specified_status = True
LOG.debug('server status is OK in: %s, break.....' % hws_server_status)
break
elif hws_server_status in error_status_list:
error_info = 'server: %s is in invalid status: %s' % (hws_server_id, hws_server_status)
LOG.error(error_info)
raise Exception(error_info)
else:
is_in_specified_status = False
LOG.debug('hws status is in: %s, retry time: %s' % (hws_server_status, str(i)))
time.sleep(sleep_interval)
continue
else:
error_info = 'hws server %s is not exist' % hws_server_id
LOG.error(error_info)
raise Exception(error_info)
return is_in_specified_status
@logger_helper()
def _is_hws_server_exist(self, provider_server):
is_exist = False
response_code = provider_server['status']
if response_code == 200:
is_exist = True
return is_exist
@logger_helper()
def power_on(self, context, instance, network_info,
block_device_info=None):
container_format = self._get_container_format_from_instance(instance)
LOG.debug('container_format: %s' % container_format)
if container_format == CONTAINER_FORMAT_HYBRID_VM:
self._power_on_for_bybrid_vm(context, instance, network_info, block_device_info)
else:
super(HyperVMDriver, self).power_off(context, instance, network_info, block_device_info)
    @logger_helper()
    def _power_on_for_bybrid_vm(self, context, instance, network_info, block_device_info):
        """Power on a hybrid VM: start the cascaded base server, then, if the
        in-VM hyper service answers, start its container.

        NOTE(review): 'bybrid' is presumably a typo for 'hybrid'; renaming
        would require updating the caller in power_on.

        :raises exception.NovaException: when no cascaded server exists for
            this instance.
        """
        LOG.debug('start to _power_on_for_bybrid_vm')
        provider_server_id = self._get_cascaded_server_id(instance)
        provider_server = self.hws_client.ecs.get_detail(self.project, provider_server_id)
        if provider_server_id and self._is_hws_server_exist(provider_server):
            port = CONF.hws.hybrid_service_port
            wormhole_business_hws = WormHoleBusinessHWS(provider_server, self.hws_client, port)
            LOG.debug('start to start server')
            start_result = self.hws_client.ecs.start_server(self.project, provider_server_id)
            self._deal_java_error(start_result)
            # Wait until the base VM reports ACTIVE before contacting the
            # hyper service running inside it.
            self._wait_for_hws_server_in_ACTIVE(provider_server_id)
            LOG.debug('start to start container')
            try:
                docker_version = wormhole_business_hws.get_version()
            except Exception, e:
                # Best effort: base VM is up but the hyper service is not
                # reachable, so skip starting the container.
                LOG.error('hyper service is not online, no need to start container')
                docker_version = None
            if docker_version:
                start_result = wormhole_business_hws.start_container(network_info, block_device_info)
                LOG.info('Start Server: %s, result is: %s' % (instance.display_name, start_result))
        else:
            error_info = 'cascaded server id is not exist for cascading server: %s.' % instance.display_name
            LOG.error(error_info)
            raise exception.NovaException(error_info)
@logger_helper()
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
container_format = self._get_container_format_from_instance(instance)
LOG.debug('container_format: %s' % container_format)
if container_format == CONTAINER_FORMAT_HYBRID_VM:
self._reboot_for_hybridvm(context, instance, network_info, reboot_type,
block_device_info, bad_volumes_callback)
else:
super(HyperVMDriver, self).reboot(context, instance, network_info, reboot_type,
block_device_info, bad_volumes_callback)
    @logger_helper()
    def _reboot_for_hybridvm(self, context, instance, network_info, reboot_type,
                             block_device_info=None, bad_volumes_callback=None):
        """Reboot a hybrid VM's container, starting the base server first when
        it is SHUTOFF.

        A non-ACTIVE/non-SHUTOFF server state, a missing provider server and
        a missing server mapping are each logged and silently ignored.
        reboot_type and bad_volumes_callback are accepted for interface
        compatibility but not used here.
        """
        provider_server_id = self._get_cascaded_server_id(instance)
        if provider_server_id:
            provider_server = self.hws_client.ecs.get_detail(self.project, provider_server_id)
            if self._is_hws_server_exist(provider_server):
                port = CONF.hws.hybrid_service_port
                wormhole_business_hws = WormHoleBusinessHWS(provider_server, self.hws_client, port)
                if self._is_hws_server_active(provider_server):
                    # if hws server is active, only need to restart container, no need to restart base vm.
                    # NOTE(review): the returned version is unused; the call
                    # presumably probes hyper-service reachability, and any
                    # exception it raises propagates to the caller -- confirm.
                    docker_version = wormhole_business_hws.get_version()
                    wormhole_business_hws.restart_container(network_info, block_device_info)
                elif self._is_hws_server_shutoff(provider_server):
                    # if hws server is SHUTOFF, we need to start base vm, and start container.
                    LOG.debug('start to start server')
                    start_result = self.hws_client.ecs.start_server(self.project, provider_server_id)
                    self._deal_java_error(start_result)
                    self._wait_for_hws_server_in_ACTIVE(provider_server_id)
                    LOG.debug('start to start container')
                    start_result = wormhole_business_hws.start_container(network_info, block_device_info)
                else:
                    LOG.debug('hws server status is: %s, not support to reboot.' % provider_server['body']['server']['status'])
            else:
                LOG.debug('provider server is not exist for server id: %s' % provider_server_id)
        else:
            LOG.debug('No provider server mapped with instance: %s' % instance.uuid)
@logger_helper()
def _is_hws_server_active(self, hws_server):
is_active = False
server_status = hws_server['body']['server']['status']
if server_status == 'ACTIVE':
is_active = True
return is_active
@logger_helper()
def _is_hws_server_shutoff(self, hws_server):
is_active = False
server_status = hws_server['body']['server']['status']
if server_status == 'SHUTOFF':
is_active = True
return is_active
@logger_helper()
def _get_wormhole_business_hws(self, instance):
provider_server_id = self._get_cascaded_server_id(instance)
provider_server = self.hws_client.ecs.get_detail(self.project, provider_server_id)
port = CONF.hws.hybrid_service_port
wormhole_business_hws = WormHoleBusinessHWS(provider_server, self.hws_client, port)
return wormhole_business_hws
# def destroy(self, context, instance, network_info, block_device_info=None,
# destroy_disks=True, migrate_data=None):
# LOG.debug('driver delete server success. instance display_name: %s' % instance.display_name)
# return | {
"repo_name": "nash-x/hws",
"path": "nova/virt/hws/hyper_vm_driver.py",
"copies": "1",
"size": "23615",
"license": "apache-2.0",
"hash": 4671297391010652000,
"line_mean": 50.1168831169,
"line_max": 129,
"alpha_frac": 0.5875926318,
"autogenerated": false,
"ratio": 3.8605525584436817,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4948145190243682,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import yaml
import io
import h5py
import datetime as dt
import math
import numpy as np
import sys, getopt
from matplotlib.mlab import psd
class FrequencyUtility:
    """Helpers for loading audio samples and computing power spectral density."""

    def load_data(self, inputFileName):
        """Read one float per line from inputFileName and return them as a list.

        Bug fix: the original also built a float32 numpy copy ('values') that
        was never returned -- dead code, removed.
        """
        with open(inputFileName, "r") as ins:
            return [float(line) for line in ins]

    def write_data(self, outputFileName, pxx, frequencies):
        """Write one 'frequency|power' line (CRLF-terminated) per PSD bin."""
        with open(outputFileName, "w") as outs:
            for x in range(pxx.size):
                outs.write(str(frequencies[x]) + '|' + str(pxx[x]) + '\r\n')

    def process_psd(self, data, nfft=1024, audio_sampling_rate=96000):
        """Compute the PSD via matplotlib.mlab.psd; returns (pxx, frequencies).

        NOTE(review): arguments are passed positionally as psd(x, NFFT, Fs).
        """
        return psd(data, nfft, audio_sampling_rate)

    def array_from_bytes(self, data_chunk, sample_width, data_type):
        """Decode little-endian samples from raw bytes into a numpy array.

        :param data_chunk: bytes/bytearray whose length must be a multiple of
            sample_width; otherwise None is returned (as before).
        :param sample_width: bytes per sample (need not equal the dtype's
            itemsize, e.g. packing 2-byte samples into an int32 array).
        :param data_type: numpy dtype; np.int16/np.int32 are decoded as
            signed integers, anything else as an IEEE float of sample_width
            bytes (2, 4 or 8).
        :return: {'Channel1': ndarray} or None for a ragged chunk.
        """
        import struct  # local import keeps the module's import block untouched

        if len(data_chunk) % sample_width != 0:
            return None
        reading_count = len(data_chunk) // sample_width
        channel1 = np.zeros(reading_count, dtype=data_type)
        is_integer = data_type in (np.int16, np.int32)
        # struct format codes for little-endian IEEE floats by byte width
        float_codes = {2: '<e', 4: '<f', 8: '<d'}
        for idx in range(reading_count):
            raw = bytes(data_chunk[idx * sample_width:(idx + 1) * sample_width])
            if is_integer:
                channel1[idx] = int.from_bytes(raw, byteorder='little', signed=True)
            else:
                # Bug fix: the original called float.from_bytes, which does
                # not exist; decode floats with struct instead.
                channel1[idx] = struct.unpack(float_codes[sample_width], raw)[0]
        return {'Channel1': channel1}
def main(argv):
    """Parse -i/-o options, then run the PSD pipeline: load, process, write."""
    input_path = ''
    output_path = ''
    try:
        opts, _ = getopt.getopt(argv, "hi:o:", ["ifile=", "ofile="])
    except getopt.GetoptError:
        print('PowerSpectrumDensityProcessor.py -i <inputfile> -o <outputfile>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('test.py -i <inputfile> -o <outputfile>')
            sys.exit()
        elif opt in ("-i", "--ifile"):
            input_path = arg
        elif opt in ("-o", "--ofile"):
            output_path = arg
    util = FrequencyUtility()
    samples = util.load_data(input_path)
    # sampling rate is taken as the sample count, matching the original call
    pxx, freqs = util.process_psd(samples, 1024, len(samples))
    util.write_data(output_path, pxx, freqs)

if __name__ == "__main__":
    main(sys.argv[1:])
"repo_name": "SidWatch/pyPSD",
"path": "src/PowerSpectrumDensityProcessor.py",
"copies": "1",
"size": "2916",
"license": "mit",
"hash": -8238202812672942000,
"line_mean": 27.0480769231,
"line_max": 95,
"alpha_frac": 0.5432098765,
"autogenerated": false,
"ratio": 3.8828229027962715,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9906202783612759,
"avg_score": 0.00396599913670252,
"num_lines": 104
} |
__author__ = 'Administrator'
from oslo.config import cfg
from heat.engine.resources.hwcloud.hws_service.ecs_service import ECSService
from heat.engine.resources.hwcloud.hws_service.evs_service import EVSService
from heat.engine.resources.hwcloud.hws_service.ims_service import IMSService
from heat.engine.resources.hwcloud.hws_service.vpc_service import VPCService
from heat.engine.resources.hwcloud.hws_service.vbs_service import VBSService
hws_opts = [cfg.StrOpt('ecs_host', help='ecs_host', default='ecs.cn-north-1.myhwclouds.com.cn'),
cfg.StrOpt('evs_host', help='evs_host', default='evs.cn-north-1.myhwclouds.com.cn'),
cfg.StrOpt('ims_host', help='ims_host', default='ims.cn-north-1.myhwclouds.com.cn'),
cfg.StrOpt('vpc_host', help='vpc_host', default='vpc.cn-north-1.myhwclouds.com.cn'),
cfg.StrOpt('vbs_host', help='vbs_host', default='vbs.cn-north-1.myhwclouds.com.cn')
]
CONF = cfg.CONF
hws_group = 'hws'
CONF.register_opts(hws_opts, hws_group)
class HWSClient(object):
    """Aggregate client holding one REST wrapper per HWS service.

    Endpoint hosts come from the [hws] config group registered above;
    credentials, region, protocol and port are shared by every service.
    """
    def __init__(self, ak, sk, region, protocol, port):
        # access key / secret key used to sign every request
        self.ak = ak
        self.sk = sk
        self.protocol = protocol
        self.port = port
        self.region = region
        # per-service endpoint hosts from configuration
        self.ecs_host = CONF.hws.ecs_host
        self.evs_host = CONF.hws.evs_host
        self.ims_host = CONF.hws.ims_host
        self.vpc_host = CONF.hws.vpc_host
        self.vbs_host = CONF.hws.vbs_host
        # one client per service, all sharing the same credentials and port
        self.ecs = ECSService(ak, sk, self.region, self.protocol, self.ecs_host, self.port)
        self.evs = EVSService(ak, sk, self.region, self.protocol, self.evs_host, self.port)
        self.ims = IMSService(ak, sk, self.region, self.protocol, self.ims_host, self.port)
        self.vpc = VPCService(ak, sk, self.region, self.protocol, self.vpc_host, self.port)
        self.vbs = VBSService(ak, sk, self.region, self.protocol, self.vbs_host, self.port)
if __name__ == '__main__':
ak = '5DTFPKOQFEIN4T7EC2BM'
sk = '00JI0Zeoezqafr03bbWZ7pFc1b4Tw0R7A9oZlFsw'
region = 'cn-north-1'
protocol = 'https'
port = '443'
hws_client = HWSClient(ak, sk, region, protocol, port)
project_id = '91d957f0b92d48f0b184c26975d2346e'
server_id = '72194025-ce73-41a4-a6a4-9637cdf6a0b1'
image_id = '37ca2b35-6fc7-47ab-93c7-900324809c5c'
flavor_id = 'c1.medium'
vpc_id = '742cef84-512c-43fb-a469-8e9e87e35459'
subnet_id = '7bd9410f-38bb-4fbb-aa7a-cf4a22cb20f3'
subnet_id_list = [subnet_id]
root_volume_type = 'SATA'
availability_zone="cn-north-1a"
size = 120
hws_client.ecs.create_server(project_id, image_id, flavor_id, "hgq1", vpcid, nics_subnet_list, root_volume_type,)
# job_info = hws_client.evs.create_volume(project_id, availability_zone, size, root_volume_type, name='v_1')
# print job_info
# job_detail = hws_client.evs.get_job_detail(project_id, '8aace0c8523c082201523f215b0903b3')
# print job_detail
volume_id = '9dfd0600-f822-48fa-b831-f43d97135ee5'
backup_name = 'bk_1'
server_list = hws_client.ecs.list(project_id)
print server_list
# job_info = hws_client.vbs.create_backup(project_id, volume_id, backup_name)
# print(job_info)
# job_id = job_info['body']['job_id']
# job_detail = hws_client.vbs.get_job_detail(project_id, job_id) | {
"repo_name": "hgqislub/hybird-orchard",
"path": "code/hwcloud/hws_service/client.py",
"copies": "1",
"size": "3289",
"license": "apache-2.0",
"hash": -5947248428108573000,
"line_mean": 43.4594594595,
"line_max": 117,
"alpha_frac": 0.6752812405,
"autogenerated": false,
"ratio": 2.667477696674777,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3842758937174777,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from oslo.config import cfg
from hwcloud.hws_service.ecs_service import ECSService
from hwcloud.hws_service.evs_service import EVSService
from hwcloud.hws_service.ims_service import IMSService
from hwcloud.hws_service.vpc_service import VPCService
from hwcloud.hws_service.vbs_service import VBSService
hws_opts = [cfg.StrOpt('ecs_host', help='ecs_host', default='ecs.cn-north-1.myhwclouds.com.cn'),
cfg.StrOpt('evs_host', help='evs_host', default='evs.cn-north-1.myhwclouds.com.cn'),
cfg.StrOpt('ims_host', help='ims_host', default='ims.cn-north-1.myhwclouds.com.cn'),
cfg.StrOpt('vpc_host', help='vpc_host', default='vpc.cn-north-1.myhwclouds.com.cn'),
cfg.StrOpt('vbs_host', help='vbs_host', default='vbs.cn-north-1.myhwclouds.com.cn')
]
CONF = cfg.CONF
hws_group = 'hws'
CONF.register_opts(hws_opts, hws_group)
class HWSClient(object):
    """Aggregate client holding one REST wrapper per HWS service.

    Endpoint hosts come from the [hws] config group registered above;
    credentials, region, protocol and port are shared by every service.
    """
    def __init__(self, ak, sk, region, protocol, port):
        # access key / secret key used to sign every request
        self.ak = ak
        self.sk = sk
        self.protocol = protocol
        self.port = port
        self.region = region
        # per-service endpoint hosts from configuration
        self.ecs_host = CONF.hws.ecs_host
        self.evs_host = CONF.hws.evs_host
        self.ims_host = CONF.hws.ims_host
        self.vpc_host = CONF.hws.vpc_host
        self.vbs_host = CONF.hws.vbs_host
        # one client per service, all sharing the same credentials and port
        self.ecs = ECSService(ak, sk, self.region, self.protocol, self.ecs_host, self.port)
        self.evs = EVSService(ak, sk, self.region, self.protocol, self.evs_host, self.port)
        self.ims = IMSService(ak, sk, self.region, self.protocol, self.ims_host, self.port)
        self.vpc = VPCService(ak, sk, self.region, self.protocol, self.vpc_host, self.port)
        self.vbs = VBSService(ak, sk, self.region, self.protocol, self.vbs_host, self.port)
if __name__ == '__main__':
    # Ad-hoc manual test of the aggregated client.
    # SECURITY NOTE(review): real-looking access/secret keys are hard-coded
    # here; they should be revoked and loaded from configuration instead.
    ak = 'DQEDQVNGMIW7KZXWO1AX'
    sk = 't4up1pD7KYs8Nj735aEcTQeYYJrnYjEQvO07L9Q0'
    region = 'cn-north-1'
    protocol = 'https'
    port = '443'
    hws_client = HWSClient(ak, sk, region, protocol, port)
    # Sample resource ids used by the experiments below (mostly commented out).
    project_id = '91d957f0b92d48f0b184c26975d2346e'
    server_id = '72194025-ce73-41a4-a6a4-9637cdf6a0b1'
    image_id = '37ca2b35-6fc7-47ab-93c7-900324809c5c'
    flavor_id = 'c1.medium'
    vpc_id = '742cef84-512c-43fb-a469-8e9e87e35459'
    subnet_id = '7bd9410f-38bb-4fbb-aa7a-cf4a22cb20f3'
    subnet_id_list = [subnet_id]
    root_volume_type = 'SATA'
    availability_zone="cn-north-1a"
    size = 120
    # job_info = hws_client.evs.create_volume(project_id, availability_zone, size, root_volume_type, name='v_1')
    # print job_info
    # job_detail = hws_client.evs.get_job_detail(project_id, '8aace0c8523c082201523f215b0903b3')
    # print job_detail
    volume_id = '9dfd0600-f822-48fa-b831-f43d97135ee5'
    backup_name = 'bk_1'
    # List the project's servers and dump the raw response (Python 2 print).
    server_list = hws_client.ecs.list(project_id)
    print server_list
    # job_info = hws_client.vbs.create_backup(project_id, volume_id, backup_name)
    # print(job_info)
    # job_id = job_info['body']['job_id']
    # job_detail = hws_client.vbs.get_job_detail(project_id, job_id)
"repo_name": "nash-x/hws",
"path": "hwcloud/hws_service/client.py",
"copies": "1",
"size": "3058",
"license": "apache-2.0",
"hash": -8315673930292457000,
"line_mean": 42.7,
"line_max": 112,
"alpha_frac": 0.6674296926,
"autogenerated": false,
"ratio": 2.649913344887348,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8798994733119684,
"avg_score": 0.0036696608735328554,
"num_lines": 70
} |
__author__ = 'Administrator'
from PyQt5.QtWidgets import QTableWidget
from PyQt5 import QtWidgets
from PyQt5 import QtCore
from PyQt5 import QtGui
from configparser import ConfigParser
class TableWidget(QTableWidget):
    """Passenger table with a right-click add/delete menu and config persistence.

    Columns: 0 checkbox, 1 name, 2 telephone, 3 id card, 4 ticket type.
    """
    def __init__(self, parent):
        super(TableWidget,self).__init__(parent)
    def create_menu(self):
        """Create the right-click context menu and set the column widths."""
        # ContextMenuPolicy must be Qt.CustomContextMenu, otherwise the
        # customContextMenuRequested signal is never emitted.
        self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(self.showContextMenu)
        # Column widths: checkbox, name, telephone, id card, ticket type.
        self.setColumnWidth(0, 30)
        self.setColumnWidth(1, 60)
        self.setColumnWidth(2, 84)
        self.setColumnWidth(3, 154)
        self.setColumnWidth(4, 60)
        # Build the QMenu with its two actions ('add' / 'delete').
        self.contextMenu = QtWidgets.QMenu(self)
        self.actionAdd = self.contextMenu.addAction('添加')
        self.actionDel = self.contextMenu.addAction('删除')
        # Wire each action to its handler; they could share one handler, but
        # separate functions keep the two behaviours independent.
        self.actionAdd.triggered.connect(self.action_add)
        self.actionDel.triggered.connect(self.action_del)
    def showContextMenu(self, pos):
        """Show the context menu at the mouse position (right-click handler)."""
        # Move the menu to the cursor before showing it.
        self.contextMenu.move(QtGui.QCursor.pos())
        self.contextMenu.show()
    def action_add(self):
        # Append a row: checked checkbox in column 0 and the default adult
        # ticket type ('成人票') in column 4.
        insert_row=self.rowCount()
        self.insertRow(insert_row)
        check_item=QtWidgets.QTableWidgetItem('')
        check_item.setCheckState(QtCore.Qt.Checked)
        ticketTypeItem=QtWidgets.QTableWidgetItem('成人票')
        self.setItem(insert_row,0,check_item)
        self.setItem(insert_row,4,ticketTypeItem)
    def action_del(self):
        # Remove every row that currently has a selected item.
        select_list=self.selectedItems()
        for item in select_list:
            self.removeRow(item.row())
    def selectedPassenger(self):
        """Return a dict per checked row, mapping the ticket-type text to its
        numeric code: 2 child, 3 student, 4 disabled veteran, 1 adult/default.
        """
        itemCount=self.rowCount()
        passengerList=[]
        for i in range(itemCount):
            if self.item(i,0).checkState() ==QtCore.Qt.Checked:
                ticketTypeStr=self.item(i,4).text()
                if '儿童' in ticketTypeStr :
                    ticketType='2'
                elif '学生' in ticketTypeStr:
                    ticketType='3'
                elif '军残' in ticketTypeStr:
                    ticketType='4'
                else:
                    ticketType='1'
                passenger={'name':self.item(i,1).text(),'ticketType':ticketType,\
                           'telephone':self.item(i,2).text(),'idcard':self.item(i,3).text()}
                passengerList.append(passenger)
        return passengerList
    def save_to_config(self):
        """Persist every row to the [passenger] section of config.ini."""
        itemCount=self.rowCount()
        config=ConfigParser()
        config.read('config.ini')
        passengerConfig={'passenger':{'count':itemCount}}
        for i in range(itemCount):
            passengerConfig['passenger']['name'+str(i)]=self.item(i,1).text()
            passengerConfig['passenger']['telephone'+str(i)]=self.item(i,2).text()
            passengerConfig['passenger']['idcard'+str(i)]=self.item(i,3).text()
            passengerConfig['passenger']['ticketType'+str(i)]=self.item(i,4).text()
            # bool value; read_dict stringifies it, and load_from_config
            # parses it back with getboolean
            passengerConfig['passenger']['check'+str(i)]=True if self.item(i,0).checkState()==QtCore.Qt.Checked else False
        config.read_dict(passengerConfig)
        with open('config.ini', 'w') as configfile:
            config.write(configfile)
            # NOTE(review): redundant -- the with-block already closes the file
            configfile.close()
    def load_from_config(self):
        """Rebuild the table rows from the [passenger] section of config.ini."""
        config=ConfigParser()
        config.read('config.ini')
        num=config.getint('passenger','count')
        for i in range(num):
            self.insertRow(i)
            check=config.getboolean('passenger','check'+str(i))
            name=config.get('passenger','name'+str(i))
            telephone=config.get('passenger','telephone'+str(i))
            idcard=config.get('passenger','idcard'+str(i))
            ticketType=config.get('passenger','ticketType'+str(i))
            check_item=QtWidgets.QTableWidgetItem('')
            check_item.setCheckState(QtCore.Qt.Checked if check else QtCore.Qt.Unchecked)
            name_item=QtWidgets.QTableWidgetItem(name)
            telephone_item=QtWidgets.QTableWidgetItem(telephone)
            idcard_item=QtWidgets.QTableWidgetItem(idcard)
            ticket_type_item=QtWidgets.QTableWidgetItem(ticketType)
            self.setItem(i,0,check_item)
            self.setItem(i,1,name_item)
            self.setItem(i,2,telephone_item)
            self.setItem(i,3,idcard_item)
            self.setItem(i,4,ticket_type_item)
| {
"repo_name": "qhgongzi/xilinTicketV2",
"path": "tablewidget.py",
"copies": "1",
"size": "4964",
"license": "bsd-3-clause",
"hash": -61403526855462220,
"line_mean": 31.7083333333,
"line_max": 126,
"alpha_frac": 0.604670913,
"autogenerated": false,
"ratio": 3.3523131672597866,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9365391733257118,
"avg_score": 0.018318469400533766,
"num_lines": 144
} |
__author__ = 'Administrator'
from stompy.simple import Client
from stompy.stomp import *
def simple():
# 通过simple方式连接JMS服务器
# 指定hostname和port(tips:ActiveMQ支持多种协议连接stomp协议的默认端口为61613,这里不要写为61616)
stomp = Client('192.168.1.166', 61613)
#stomp = Client()#如果是ActiveMQ和ActiveMQ客户端(本程序)在同一台机器可使用默认值:hostname="localhost",port=61613
# 连接服务器
stomp.connect()
# 发送消息到指定的queue
stomp.put("The quick brown fox...", destination="/queue/hello")
# 从指定的queue订阅消息。ack参数指定为"client",不然可能出现一个问题(具体忘了,以后补充),ack默认值为"auto"
stomp.subscribe("/queue/hello",ack="client")
# 等待接收ActiveMQ推送的消息
message = stomp.get()
# 打印消息的主体
print message.body
message.body
'quick brown fox...'
stomp.ack(message)
# 退订
stomp.unsubscribe("/queue/hello")
# 关闭连接
stomp.disconnect()
def simple_receive():
    """Endless receive loop using the simple Client API: print and ack
    every message pushed by the broker.

    NOTE(review): the while-True loop never exits, so the unsubscribe and
    disconnect calls after it are unreachable.
    """
    stomp = Client('192.168.1.166', 61613)
    stomp.connect()
    stomp.subscribe("/queue/hello",ack="client")
    while True:
        message = stomp.get()
        print message.body
        stomp.ack(message)
    stomp.unsubscribe("/queue/hello")
    stomp.disconnect()
def stomp():
    """Demo of the lower-level Stomp frame API: send, receive one frame, ack.

    NOTE(review): the local variable deliberately shadows this function's
    own name inside its scope.
    """
    stomp = Stomp('192.168.1.166', 61613)
    stomp.connect()
    stomp.send({'destination': '/queue/hello', 'body': 'Testing', 'persistent': 'true'})
    stomp.subscribe({'destination':'/queue/hello', 'ack':'client'})
    frame = stomp.receive_frame()
    print(frame.headers['message-id'])
    print frame.body
    stomp.ack(frame)
    stomp.unsubscribe({'destination': '/queue/hello'})
    stomp.disconnect()
def stomp_receive():
    """Endless receive loop using the Stomp frame API: print and ack
    every frame pushed by the broker.

    NOTE(review): the while-True loop never exits, so the unsubscribe and
    disconnect calls after it are unreachable.
    """
    stomp = Stomp('192.168.1.166', 61613)
    stomp.connect()
    stomp.subscribe({'destination':'/queue/hello', 'ack':'client'})
    while True:
        frame = stomp.receive_frame()
        print(frame.headers['message-id'])
        print frame.body
        stomp.ack(frame)
    stomp.unsubscribe({'destination': '/queue/hello'})
    stomp.disconnect()
# Run the send/receive demo; uncomment one of the alternatives below to
# exercise the other APIs instead.
simple()
#simple_receive()
#stomp()
#stomp_receive()
"repo_name": "SainTestProject/stompy_link_AMQ",
"path": "stompy_link_AMQ/connectAMQ.py",
"copies": "1",
"size": "2245",
"license": "apache-2.0",
"hash": -4601981624397923300,
"line_mean": 27.2,
"line_max": 94,
"alpha_frac": 0.6523061328,
"autogenerated": false,
"ratio": 2.559014267185473,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3711320399985473,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from urlparse import urljoin
import json
import time
#from hwcloud import LOG
from heat.openstack.common import log as logging
LOG = logging.getLogger(__name__)
from heat.engine.resources.hwcloud.java_gateway import HWSRestMethod
def retry(times, interval):
    """Decorator factory: re-invoke the wrapped call while its response's
    'status' field is not a 2xx code, sleeping `interval` seconds between
    attempts and giving up (returning the last response) after `times`
    retries.
    """
    def _wrapper(f):
        def __wrapper(*args, **kwargs):
            attempts = 0
            while True:
                response = f(*args, **kwargs)
                if str(response['status']).startswith('20'):
                    return response
                if attempts >= times:
                    # retry budget exhausted; hand back the failing response
                    return response
                attempts += 1
                time.sleep(interval)
                LOG.debug('retry time: %s' % attempts)
        return __wrapper
    return _wrapper
# Retry policy shared by the HTTP helpers below: up to RETRY_TIMES
# attempts, sleeping RETRY_INTERVAL seconds between them.
RETRY_TIMES = 10
# interval seconds
RETRY_INTERVAL = 0.5
class HWSService(object):
    """Base REST client for one HWS service: signs requests through the java
    gateway and decodes the JSON responses.
    """

    def __init__(self, ak, sk, service_name, region, protocol, host, port):
        """Store credentials and endpoint data; pre-compute the URL prefix."""
        self.ak = ak
        self.sk = sk
        self.service_name = service_name
        self.region = region
        self.protocol = protocol
        self.host = host
        self.port = port
        self.uri_prefix = self.get_url_prefix()

    def get_url_prefix(self):
        """Return the endpoint prefix '<protocol>://<host>:<port>'."""
        return '%s://%s:%s' % (self.protocol, self.host, self.port)

    def composite_full_uri(self, api_uri):
        """Resolve a service-relative URI against the endpoint prefix."""
        return urljoin(self.uri_prefix, api_uri)

    @retry(RETRY_TIMES, RETRY_INTERVAL)
    def get(self, uri):
        """HTTP GET via the gateway; returns the decoded JSON response."""
        request_url = self.composite_full_uri(uri)
        raw = HWSRestMethod.get(self.ak, self.sk, request_url, self.service_name, self.region)
        return json.loads(raw)

    @retry(RETRY_TIMES, RETRY_INTERVAL)
    def post(self, uri, body):
        """HTTP POST via the gateway; returns the decoded JSON response."""
        request_url = self.composite_full_uri(uri)
        raw = HWSRestMethod.post(self.ak, self.sk, request_url, body, self.service_name, self.region)
        return json.loads(raw)

    @retry(RETRY_TIMES, RETRY_INTERVAL)
    def put(self, uri, body):
        """HTTP PUT via the gateway; returns the decoded JSON response."""
        request_url = self.composite_full_uri(uri)
        raw = HWSRestMethod.put(self.ak, self.sk, request_url, body, self.service_name, self.region)
        return json.loads(raw)

    @retry(RETRY_TIMES, RETRY_INTERVAL)
    def delete(self, uri):
        """HTTP DELETE via the gateway; returns the decoded JSON response."""
        request_url = self.composite_full_uri(uri)
        raw = HWSRestMethod.delete(self.ak, self.sk, request_url, self.service_name, self.region)
        return json.loads(raw)

    def convertDictOptsToString(self, opts):
        """Render a dict of query options as 'k1=v1&k2=v2' (dict order)."""
        return "&".join("=".join([key, value]) for key, value in opts.items())

    def get_job_detail(self, project_id, job_id):
        """Fetch async-job detail: GET /v1/{project_id}/jobs/{job_id}."""
        uri = '/v1/%s/jobs/%s' % (project_id, job_id)
        return self.get(uri)
"repo_name": "hgqislub/hybird-orchard",
"path": "code/hwcloud/hws_service/__init__.py",
"copies": "1",
"size": "2879",
"license": "apache-2.0",
"hash": 5799749079120495000,
"line_mean": 31.7272727273,
"line_max": 114,
"alpha_frac": 0.5811045502,
"autogenerated": false,
"ratio": 3.6910256410256412,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47721301912256414,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from urlparse import urljoin
import json
import time
from hwcloud import LOG
from hwcloud.java_gateway import HWSRestMethod
def retry(times, interval):
    """Decorator factory: re-invoke the wrapped call while its response's
    'status' field is not a 2xx code, sleeping `interval` seconds between
    attempts and giving up (returning the last response) after `times`
    retries.
    """
    def _wrapper(f):
        def __wrapper(*args, **kwargs):
            attempts = 0
            while True:
                response = f(*args, **kwargs)
                if str(response['status']).startswith('20'):
                    return response
                if attempts >= times:
                    # retry budget exhausted; hand back the failing response
                    return response
                attempts += 1
                time.sleep(interval)
                LOG.debug('retry time: %s' % attempts)
        return __wrapper
    return _wrapper
# Retry policy shared by the HTTP helpers below: up to RETRY_TIMES
# attempts, sleeping RETRY_INTERVAL seconds between them.
RETRY_TIMES = 10
# interval seconds
RETRY_INTERVAL = 0.5
class HWSService(object):
    """Base REST client for one HWS service: signs requests through the java
    gateway and decodes the JSON responses.
    """

    def __init__(self, ak, sk, service_name, region, protocol, host, port):
        """Store credentials and endpoint data; pre-compute the URL prefix."""
        self.ak = ak
        self.sk = sk
        self.service_name = service_name
        self.region = region
        self.protocol = protocol
        self.host = host
        self.port = port
        self.uri_prefix = self.get_url_prefix()

    def get_url_prefix(self):
        """Return the endpoint prefix '<protocol>://<host>:<port>'."""
        return '%s://%s:%s' % (self.protocol, self.host, self.port)

    def composite_full_uri(self, api_uri):
        """Resolve a service-relative URI against the endpoint prefix."""
        return urljoin(self.uri_prefix, api_uri)

    @retry(RETRY_TIMES, RETRY_INTERVAL)
    def get(self, uri):
        """HTTP GET via the gateway; returns the decoded JSON response."""
        request_url = self.composite_full_uri(uri)
        raw = HWSRestMethod.get(self.ak, self.sk, request_url, self.service_name, self.region)
        return json.loads(raw)

    @retry(RETRY_TIMES, RETRY_INTERVAL)
    def post(self, uri, body):
        """HTTP POST via the gateway; returns the decoded JSON response."""
        request_url = self.composite_full_uri(uri)
        raw = HWSRestMethod.post(self.ak, self.sk, request_url, body, self.service_name, self.region)
        return json.loads(raw)

    @retry(RETRY_TIMES, RETRY_INTERVAL)
    def delete(self, uri):
        """HTTP DELETE via the gateway; returns the decoded JSON response."""
        request_url = self.composite_full_uri(uri)
        raw = HWSRestMethod.delete(self.ak, self.sk, request_url, self.service_name, self.region)
        return json.loads(raw)

    def convertDictOptsToString(self, opts):
        """Render a dict of query options as 'k1=v1&k2=v2' (dict order)."""
        return "&".join("=".join([key, value]) for key, value in opts.items())

    def get_job_detail(self, project_id, job_id):
        """Fetch async-job detail: GET /v1/{project_id}/jobs/{job_id}."""
        uri = '/v1/%s/jobs/%s' % (project_id, job_id)
        return self.get(uri)
"repo_name": "nash-x/hws",
"path": "hwcloud/hws_service/__init__.py",
"copies": "1",
"size": "2536",
"license": "apache-2.0",
"hash": -3089987130460696600,
"line_mean": 30.7125,
"line_max": 114,
"alpha_frac": 0.5638801262,
"autogenerated": false,
"ratio": 3.7514792899408285,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48153594161408286,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import json
from heat.engine.resources.hwcloud.hws_service import HWSService
class EVSService(HWSService):
def __init__(self, ak, sk, region, protocol, host, port):
super(EVSService, self).__init__(ak, sk, 'EVS', region, protocol, host, port)
def list(self, project_id, opts=None):
uri = '/v2/%s/cloudvolumes' % project_id
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def create_volume(self, project_id, availability_zone, size, volume_type,
backup_id=None, description=None, name=None, imageRef=None, count=None):
"""
{
"volume": {
"backup_id": null,
"count": 1,
"availability_zone": "az1.dc1",
"description": "test_volume_1",
"size": 120,
"name": "test_volume_1",
"imageRef": null,
"volume_type": "SSD"
}
}
:param project_id:
:param availability_zone:
:param size:
:param volume_type:
:param backup_id:
:param description:
:param name:
:param imageRef:
:param count:
:return: dict
{
"job_id": "70a599e0-31e7-49b7-b260-868f441e862b",
}
or
{
"error": {
"message": "XXXX",
"code": "XXX"
}
}
Get job detail result:
{
u'body': {
u'status': u'RUNNING',
u'fail_reason': None,
u'job_id': u'8aace0c651b0a02301521ae1f96c6138',
u'job_type': u'createVolume',
u'entities': {
u'volume_id': u'9bd6fa88-0e60-48e5-ae61-7e028dbdf045'
},
u'end_time': u'',
u'begin_time': u'2016-01-07T06: 59: 23.115Z',
u'error_code': None
},
u'status': 200
}
{
u'body': {
u'status': u'SUCCESS',
u'fail_reason': None,
u'job_id': u'8aace0c651b0a02301521ae1f96c6138',
u'job_type': u'createVolume',
u'entities': {
u'volume_id': u'9bd6fa88-0e60-48e5-ae61-7e028dbdf045'
},
u'end_time': u'2016-01-07T06: 59: 48.279Z',
u'begin_time': u'2016-01-07T06: 59: 23.115Z',
u'error_code': None
},
u'status': 200
}
Failed job result:
{
u'body': {
u'status': u'FAIL',
u'fail_reason': u"EbsCreateVolumeTask-fail:badRequest: Invalid input received: Availability zone 'cn-north-1' is invalid",
u'job_id': u'8aace0c651b0a02301521ab7e58660ca',
u'job_type': u'createVolume',
u'entities': {
},
u'end_time': u'2016-01-07T06: 13: 25.809Z',
u'begin_time': u'2016-01-07T06: 13: 25.509Z',
u'error_code': u'EVS.5400'
},
u'status': 200
}
"""
uri = '/v2/%s/cloudvolumes' % project_id
request_body_dict = {}
volume = {}
volume['availability_zone'] = availability_zone
volume['size'] = size
volume['volume_type'] = volume_type
if backup_id:
volume['backup_id'] = backup_id
if description:
volume['description'] = description
if name:
volume['name'] = name
if imageRef:
volume['imageRef'] = imageRef
if count:
volume['count'] = count
request_body_dict['volume'] = volume
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
def delete_volume(self, project_id, volume_id):
"""
DELETE /v2/{tenant_id}/cloudvolumes/{volume_id}
:return:
"""
uri = '/v2/%s/cloudvolumes/%s' % (project_id, volume_id)
response = self.delete(uri)
return response
    def get_volume_detail(self, project_id, volume_id):
        """
        Query the detail of one volume.
        GET /v2/{project_id}/volumes/{volume_id}
        :param project_id: string, tenant/project id
        :param volume_id: string, volume id
        """
        uri = "/v2/%s/volumes/%s" % (project_id, volume_id)
        response = self.get(uri)
return response | {
"repo_name": "hgqislub/hybird-orchard",
"path": "code/hwcloud/hws_service/evs_service.py",
"copies": "1",
"size": "4462",
"license": "apache-2.0",
"hash": -8501975692460959000,
"line_mean": 28.9530201342,
"line_max": 138,
"alpha_frac": 0.4784849843,
"autogenerated": false,
"ratio": 3.6071139854486662,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4585598969748666,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import json
from heat.engine.resources.hwcloud.hws_service import HWSService
class ECSService(HWSService):
    def __init__(self, ak, sk, region, protocol, host, port):
        # Register this client with service type 'ECS' on the shared HWS base.
        super(ECSService, self).__init__(ak, sk, 'ECS', region, protocol, host, port)
def list(self, project_id, opts=None):
"""
:param project_id: string
:param opts: dict
:return:
{
u'body': {
u'servers': [{
u'id': u'817187bd-7691-408b-a78e-0bb0e8407cd6',
u'links': [{
u'href': u'https: //compute.region.cnnorth1.hwclouds.com/v2/91d957f0b92d48f0b184c26975d2346e/servers/817187bd-7691-408b-a78e-0bb0e8407cd6',
u'rel': u'self'
},
{
u'href': u'https: //compute.region.cnnorth1.hwclouds.com/91d957f0b92d48f0b184c26975d2346e/servers/817187bd-7691-408b-a78e-0bb0e8407cd6',
u'rel': u'bookmark'
}],
u'name': u's_server_01'
}]
},
u'status': 200
}
"""
uri = "v2/%s/servers" % project_id
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def list_detail(self, project_id, opts=None):
uri = "/v2/%s/servers/detail" % project_id
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def get_detail(self, project_id, server_id):
"""
:param project_id:
:param server_id:
:return:
"""
uri = '/v2/%s/servers/%s' % (project_id, server_id)
return self.get(uri)
def create_server(self, project_id, image_ref, flavor_ref, name, vpcid, nics_subnet_list, root_volume_type,availability_zone
,personality_path=None, personality_contents=None, adminPass=None, public_ip_id=None, count=None,
data_volumes=None, security_groups=None, key_name=None):
"""
Rest method: POST
Uri for create server: /v1/{project_id}/cloudservers
Request body of create server in hws is as following:
{
"server": {
"availability_zone": "az1.dc1",
"name": "newserver",
"imageRef": "imageid",
"root_volume": {
"volumetype": "SATA"
},
"data_volumes": [
{
"volumetype": "SATA",
"size": 100
},
{
"volumetype": "SSD",
"size": 100
}
],
"flavorRef": " 1",
"personality": [
{
"path": "/etc/banner.txt",
"contents": "ICAgICAgDQoiQmFjaA=="
}
],
"vpcid": "vpcid",
"security_groups": [
{
"id": "securitygroupsid"
}
],
"nics": [
{
"subnet_id": "subnetid "
}
],
"publicip": {
"id": "publicipid"
},
"key_name": "keyname",
"adminPass": "password",
"count": 1
}
}
:param project_id: string
:param imageRef: string
:param flavorRef: string
:param name: string
:param vpcid: string
:param nics_subnet_list: list of subnet_id, ['subnet_id_01', 'subnet_id_02']
:param root_volume_type: string
:param personality_path: string
:param personality_contents: string
:param adminPass: string
:param public_ip_id: string
:param count: int
:param data_volumes: list
[
{
"volumetype": "SATA",
"size": 100
},
{
"volumetype": "SSD",
"size": 100
}
]
:param security_groups: list of security group id, e.g. ['sg_id_01', 'sg_id_02']
:param availability_zone: string
:param key_name: string
:return:
{
u'body': {
u'job_id': u'8aace0c851b0a3c10151eca2b4183764'
},
u'status': 200
}
"""
uri = "/v1/%s/cloudservers" % project_id
request_body_dict = {}
request_server_body = {}
request_server_body['imageRef'] = image_ref
request_server_body['flavorRef'] = flavor_ref
request_server_body['name'] = name
request_server_body['vpcid'] = vpcid
request_server_body['nics'] = nics_subnet_list
if adminPass:
request_server_body['adminPass'] = adminPass
if count:
request_server_body['count'] = count
if personality_path and personality_contents:
personality = {}
personality['path'] = personality_path
personality['contents'] = personality_contents
request_server_body['personality'] = personality
if public_ip_id:
public_ip_dict = {}
public_ip_dict["id"] = public_ip_id
request_server_body['publicip'] = public_ip_dict
if root_volume_type:
root_volume_dict = {}
root_volume_dict['volumetype'] = root_volume_type
request_server_body['root_volume'] = root_volume_dict
if data_volumes:
request_server_body['data_volumes'] = data_volumes
if security_groups:
security_group_list = []
for security_group_id in security_groups:
security_group_dict = {}
security_group_dict['id'] = security_group_id
security_group_list.append(security_group_dict)
request_server_body['security_groups'] = security_group_list
if availability_zone:
request_server_body['availability_zone'] = availability_zone
if key_name:
request_server_body['key_name'] = key_name
request_body_dict['server'] = request_server_body
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
def list_flavors(self, project_id):
"""
:param project_id: string
:return: dict
{
"body":{
"flavors": [
{
"id": "104",
"name": "m1.large",
"vcpus": "4",
"ram": 8192,
"disk": "80",
"swap": "",
"OS-FLV-EXT-DATA:ephemeral": 0,
"rxtx_factor": null,
"OS-FLV-DISABLED:disabled": null,
"rxtx_quota": null,
"rxtx_cap": null,
"os-flavor-access:is_public": null,
"os_extra_specs": {
"hws:performancetype": "normal",
"hws:availablezone": "az1.dc1"
}
},
]
},
"status": 200
}
"""
uri = "/v1/%s/cloudservers/flavors" % project_id
return self.get(uri)
def get_server_ips(self, project_id, server_id):
"""
:param project_id:
:param server_id:
:return:
{
"interfaceAttachments": [
{
"port_state": "ACTIVE",
"fixed_ips": [
{
"subnet_id": "f8a6e8f8-c2ec-497c-9f23-da9616de54ef",
"ip_address": "192.168.1.3"
}
],
"net_id": "3cb9bc59-5699-4588-a4b1-b87f96708bc6",
"port_id": "ce531f90-199f-48c0-816c-13e38010b442",
"mac_addr": "fa:16:3e:4c:2c:30"
}
]
}
"""
uri = '/v2/%s/servers/%s/os-interface' % (project_id, server_id)
return self.get(uri)
def delete_server(self, project_id, server_id_list, delete_public_ip, delete_volume):
"""
{
"servers": [
{
"id": "616fb98f-46ca-475e-917e-2563e5a8cd19"
}
],
"delete_publicip": False,
"delete_volume": False
}
:param project_id: string, project id
:param server_id_list: list, e.g. [server_id, ...]
:param delete_public_ip: boolean
:param delete_volume: boolean
:return:
"""
uri = "/v1/%s/cloudservers/delete" % project_id
request_body_dict = {}
server_dict_list = []
for server_id in server_id_list:
id_dict = {"id":server_id}
server_dict_list.append(id_dict)
request_body_dict["servers"] = server_dict_list
request_body_dict["delete_publicip"] = delete_public_ip
request_body_dict["delete_volume"] = delete_volume
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
def stop_server(self, project_id, server_id):
uri = '/v2/%s/servers/%s/action' % (project_id, server_id)
request_body_dict = {}
request_body_dict['os-stop'] = {}
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
    def stop_servers(self, project_id, servers_list):
        # TODO(review): batch stop is not implemented; callers currently get None.
        pass
def start_server(self, project_id, server_id):
uri = '/v2/%s/servers/%s/action' % (project_id, server_id)
request_body_dict = {"os-start": {}}
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
    def reboot_hard(self, project_id, server_id):
        # Hard reboot: the "HARD" type is passed straight to the reboot action.
        return self.reboot(project_id, server_id, "HARD")
    def reboot_soft(self, project_id, server_id):
        # Soft reboot: the "SOFT" type is passed straight to the reboot action.
        return self.reboot(project_id, server_id, "SOFT")
def reboot(self, project_id, server_id, type):
"""
:param project_id:
:param server_id:
:param type: string, "SOFT" or "HARD"
:return:
"""
uri = '/v2/%s/servers/%s/action' % (project_id, server_id)
request_body_dict = {
"reboot": {
"type": type
}
}
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
def attach_volume(self, project_id, server_id, volume_id, device_name):
uri = 'v1/%s/cloudservers/%s/attachvolume' % (project_id, server_id)
request_body = {}
volume_attachment = {}
volume_attachment['volumeId'] = volume_id
volume_attachment['device'] = device_name
request_body['volumeAttachment'] = volume_attachment
request_body_string = json.dumps(request_body)
response = self.post(uri, request_body_string)
return response
def detach_volume(self, project_id, server_id, attachment_id):
uri = '/v1/%s/cloudservers/%s/detachvolume/%s' % (project_id, server_id, attachment_id)
response = self.delete(uri)
return response
def get_volume_list(self, project_id, server_id):
uri = '/v2/%s/servers/%s/os-volume_attachments' % (project_id, server_id)
response = self.get(uri)
return response
def add_nics(self, project_id, server_id, nics):
"""
:param project_id: string
:param server_id: string
:param nics: list
:return:
"""
uri = '/v1/%s/cloudservers/%s/nics' % (project_id, server_id)
request_body_dict = {"nics": nics}
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
def get_all_nics(self, project_id, server_id):
"""
:param project_id:string
:param server_id: string
:return:
{
"interfaceAttachments": [
{
"port_state": "ACTIVE",
"fixed_ips": [
{
"subnet_id": "f8a6e8f8-c2ec-497c-9f23-da9616de54ef",
"ip_address": "192.168.1.3"
}
],
"net_id": "3cb9bc59-5699-4588-a4b1-b87f96708bc6",
"port_id": "ce531f90-199f-48c0-816c-13e38010b442",
"mac_addr": "fa:16:3e:4c:2c:30"
}
]
}
"""
uri = '/v2/%s/servers/%s/os-interface' % (project_id, server_id)
return self.get(uri)
def get_nic_info(self, project_id, server_id, nic_id):
"""
:param project_id: string
:param server_id: string
:param nic_id: string
:return:
{
"interfaceAttachment":
{
"port_state": "ACTIVE",
"fixed_ips": [
{
"subnet_id": "f8a6e8f8-c2ec-497c-9f23-da9616de54ef",
"ip_address": "192.168.1.3"
}
],
"net_id": "3cb9bc59-5699-4588-a4b1-b87f96708bc6",
"port_id": "ce531f90-199f-48c0-816c-13e38010b442",
"mac_addr": "fa:16:3e:4c:2c:30"
}
}
"""
uri = '/v2/%s/servers/%s/os-interface/%s' % (project_id, server_id, nic_id)
return self.get(uri)
| {
"repo_name": "Hybrid-Cloud/cloud_manager",
"path": "code/hwcloud/hws_service/ecs_service.py",
"copies": "2",
"size": "14152",
"license": "apache-2.0",
"hash": -1633222104952285700,
"line_mean": 32.4562647754,
"line_max": 163,
"alpha_frac": 0.4810627473,
"autogenerated": false,
"ratio": 3.7648310720936418,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005308859519629795,
"num_lines": 423
} |
__author__ = 'Administrator'
import json
from heat.engine.resources.hwcloud.hws_service import HWSService
class IMSService(HWSService):
    def __init__(self, ak, sk, region, protocol, host, port):
        # Register this client with service type 'IMS' on the shared HWS base.
        super(IMSService, self).__init__(ak, sk, 'IMS', region, protocol, host, port)
def list(self, name=None):
uri = '/v2/cloudimages'
if name:
uri = uri + "?name=" + name
return self.get(uri)
def create_image(self, name, description, instance_id=None, backup_id=None):
"""
POST /v2/cloudimages/action
Request Body:
{
"name":"ims_test",
"description":"xxxxx",
"instance_id":"877a2cda-ba63-4e1e-b95f-e67e48b6129a"
}
Request Body:
{
"name":"ims_test",
"description":"xxxxx",
"backup_id":"f5bb2392-db73-4986-8aed-9623a1474b2c"
}
:param project_id:
:return:
"""
uri = '/v2/cloudimages/action'
request_body_dict = {}
request_body_dict['name'] = name
request_body_dict['description'] = description
if instance_id:
request_body_dict['instance_id'] = instance_id
if backup_id:
request_body_dict['backup_id'] = backup_id
request_body_string = json.dumps(request_body_dict)
response = self.post(uri, request_body_string)
return response
    def delete_image(self, image_id):
        """
        Delete an image.
        DELETE /v2/images/{image_id}
        :param image_id: string, image id
        """
        uri = '/v2/images/%s' % image_id
        response = self.delete(uri)
return response | {
"repo_name": "hgqislub/hybird-orchard",
"path": "code/hwcloud/hws_service/ims_service.py",
"copies": "1",
"size": "1552",
"license": "apache-2.0",
"hash": 1848388410172251600,
"line_mean": 27.2363636364,
"line_max": 85,
"alpha_frac": 0.5702319588,
"autogenerated": false,
"ratio": 3.410989010989011,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44812209697890104,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import json
from heat.engine.resources.hwcloud.hws_service import HWSService
class VBSService(HWSService):
    def __init__(self, ak, sk, region, protocol, host, port):
        # NOTE(review): service type is 'VPC', not 'VBS' — looks like a
        # copy-paste from VPCService. Confirm signing/endpoint routing really
        # works with 'VPC' before changing it.
        super(VBSService, self).__init__(ak, sk, 'VPC', region, protocol, host, port)
def create_backup(self, project_id, volume_id, name=None, description=None):
"""
job create response:
{
'body':
{
'job_id': ''
},
'status': 200
}
get job detail response:
{
'body':
{
'status': 'FAIL',
'fail_reason': 'CreateBackup Task-fail: fsp cinder return backup status is error.'
'job_id': '4010b39b523388bb015252460a581a3d',
'job_type': 'bksCreateBackup',
'entities':
{
'bks_create_volume_name': 'autobk_volume_2016-01-15T01:0807.476Z',
'backup_id': '',
'snapshot_id': 'faddfasdf',
'volume_id': 'safdafadf'
},
'end_time': '2016-01-15T01:0807.476Z',
'begin_time': '2016-01-15T01:0807.476Z',
'error_code': 'VolumeBackup.0064'
},
'status': 200
}
:param project_id:
:param volume_id:
:param name:
:param description:
:return:
"""
uri = '/v2/%s/cloudbackups' % project_id
request_body = {}
backup = {}
backup['volume_id'] = volume_id
if name:
backup['name'] = name
if description:
backup['description'] = description
request_body['backup'] = backup
request_body_string = json.dumps(request_body)
response = self.post(uri, request_body_string)
return response
def delete_backup(self, project_id, backup_id):
uri = '/v2/%s/cloudbackups/%s' % (project_id, backup_id)
response = self.delete(uri)
return response
| {
"repo_name": "Hybrid-Cloud/cloud_manager",
"path": "code/hwcloud/hws_service/vbs_service.py",
"copies": "3",
"size": "2195",
"license": "apache-2.0",
"hash": -3324985466395946500,
"line_mean": 30.3571428571,
"line_max": 102,
"alpha_frac": 0.474715262,
"autogenerated": false,
"ratio": 4.027522935779817,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.014917266280609161,
"num_lines": 70
} |
__author__ = 'Administrator'
import json
from heat.engine.resources.hwcloud.hws_service import HWSService
class VPCService(HWSService):
    def __init__(self, ak, sk, region, protocol, host, port):
        # Register this client with service type 'VPC' on the shared HWS base.
        super(VPCService, self).__init__(ak, sk, 'VPC', region, protocol, host, port)
def list_vpc(self, project_id, opts=None):
"""
:param project_id: string
:param opts: dict
:return: dict
{
u'body': {
u'vpcs': [{
u'status': u'OK',
u'cidr': u'172.21.0.0/16',
u'id': u'742cef84-512c-43fb-a469-8e9e87e35459',
u'name': u'VPC_2015-10-21-11-30-28'
}]
},
u'status': 200
}
"""
uri = "/v1/%s/vpcs" % project_id
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def create_vpc(self, project_id, name, cidr):
"""
:param project_id: string
:param opts: dict
:return: dict
{
u'body': {
u'vpcs': [{
u'status': u'OK',
u'cidr': u'172.21.0.0/16',
u'id': u'742cef84-512c-43fb-a469-8e9e87e35459',
u'name': u'VPC_2015-10-21-11-30-28'
}]
},
u'status': 200
}
"""
uri = "/v1/%s/vpcs" % project_id
request_body_dict = {}
vpc_map = dict()
vpc_map["name"] = name
vpc_map["cidr"] = cidr
request_body_dict["vpc"] = vpc_map
request_body_string = json.dumps(request_body_dict)
return self.post(uri, request_body_string)
def delete_vpc(self, project_id, vpc_id):
"""
:param project_id: string
:param vpc_id: string
:return: dict
{
u'status': 204
}
"""
uri = "/v1/%s/vpcs/%s" % (project_id, vpc_id)
return self.delete(uri)
def list_vpc_detail(self, project_id, vpc_id):
"""
:param project_id: string
:param vpc_id: string
:return: dict
{
u'body': {
u'vpc': {
u'status': u'OK',
u'cidr': u'172.21.0.0/16',
u'id': u'742cef84-512c-43fb-a469-8e9e87e35459',
u'name': u'VPC_2015-10-21-11-30-28'
}
},
u'status': 200
}
"""
uri = "/v1/%s/vpcs/%s" % (project_id, vpc_id)
return self.get(uri)
def list_subnet(self, project_id, opts=None):
"""
:param project_id: string
:param opts: dict
:return: dict
{
u'body': {
u'subnets': [{
u'status': u'ACTIVE',
u'name': u'Subnet1',
u'dhcp_enable': True,
u'availability_zone': u'cn-north-1a',
u'primary_dns': u'114.114.114.114',
u'gateway_ip': u'172.21.0.1',
u'vpc_id': u'742cef84-512c-43fb-a469-8e9e87e35459',
u'cidr': u'172.21.0.0/24',
u'secondary_dns': u'114.114.115.115',
u'id': u'7bd9410f-38bb-4fbb-aa7a-cf4a22cb20f3'
},
{
u'status': u'ACTIVE',
u'name': u'subnet3',
u'dhcp_enable': True,
u'availability_zone': u'cn-north-1a',
u'primary_dns': u'114.114.114.114',
u'gateway_ip': u'172.21.2.1',
u'vpc_id': u'742cef84-512c-43fb-a469-8e9e87e35459',
u'cidr': u'172.21.2.0/24',
u'secondary_dns': u'114.114.115.115',
u'id': u'9fd27cfd-a988-4495-ae7c-c5521d8a5c09'
},
{
u'status': u'ACTIVE',
u'name': u'Subnet2',
u'dhcp_enable': True,
u'availability_zone': u'cn-north-1a',
u'primary_dns': u'114.114.114.114',
u'gateway_ip': u'172.21.1.1',
u'vpc_id': u'742cef84-512c-43fb-a469-8e9e87e35459',
u'cidr': u'172.21.1.0/24',
u'secondary_dns': u'114.114.115.115',
u'id': u'd654fe9f-0edc-42f0-a52b-f8c4cb8ac1da'
}]
},
u'status': 200
}
"""
uri = "/v1/%s/subnets" % project_id
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def list_subnet_detail(self, project_id, subnet_id):
"""
:param project_id: string
:param subnet_id: string
:return: dict
{
u'body': {
u'subnet': {
u'status': u'ACTIVE',
u'name': u'Subnet1',
u'dhcp_enable': True,
u'availability_zone': u'cn-north-1a',
u'primary_dns': u'114.114.114.114',
u'gateway_ip': u'172.21.0.1',
u'vpc_id': u'742cef84-512c-43fb-a469-8e9e87e35459',
u'cidr': u'172.21.0.0/24',
u'secondary_dns': u'114.114.115.115',
u'id': u'7bd9410f-38bb-4fbb-aa7a-cf4a22cb20f3'
}
},
u'status': 200
}
"""
uri = "/v1/%s/subnets/%s" % (project_id, subnet_id)
return self.get(uri)
def create_subnet(self, project_id, name, cidr, availability_zone,gateway_ip, vpc_id,
dhcp_enable=None, primary_dns=None, secondary_dns=None):
"""
:param project_id: string
:param name: string
:param cidr: string, '172.21.0.0/24'
:param availability_zone: string, 'cn-north-1a'
:param vpc_id: string
:param gateway_ip: string
:param dhcp_enable: string
:param primary_dns: string
:param secondary_dns: string
:return:
"""
uri = "/v1/%s/subnets" % project_id
request_body_dict = {}
subnet_map = dict()
subnet_map["name"] = name
subnet_map["cidr"] = cidr
subnet_map["availability_zone"] = availability_zone
subnet_map["vpc_id"] = vpc_id
subnet_map["gateway_ip"] = gateway_ip
if dhcp_enable:
subnet_map["dhcp_enable"] = dhcp_enable
if primary_dns:
subnet_map["primary_dns"] = primary_dns
if secondary_dns:
subnet_map["secondary_dns"] = secondary_dns
request_body_dict["subnet"] = subnet_map
request_body_string = json.dumps(request_body_dict)
return self.post(uri, request_body_string)
def delete_subnet(self, project_id, vpc_id, subnet_id):
"""
:param project_id: string
:param vpc_id: string
:param subnet_id: string
:return: dict
{
u'status': 204
}
"""
uri = "/v1/%s/vpcs/%s/subnets/%s" % (project_id, vpc_id, subnet_id)
return self.delete(uri)
def list_public_ips(self, project_id, opts=None):
"""
:param project_id: string
:param opts: dict
:return: dict
{
u'status': 200
"body"{
"publicips": [
{
"id": "6285e7be-fd9f-497c-bc2d-dd0bdea6efe0",
"status": "DOWN",
"type": "5_bgp",
"public_ip_address": "161.17.101.9",
"private_ip_address": "192.168.10.5",
"tenant_id": "8b7e35ad379141fc9df3e178bd64f55c",
"create_time": "2015-07-16 04:22:32",
"bandwidth_id": "3fa5b383-5a73-4dcb-a314-c6128546d855",
"bandwidth_share_type": "PER",
"bandwidth_size": 5
},
{
"id": "80d5b82e-43b9-4f82-809a-37bec5793bd4",
"status": "DOWN",
"type": "5_bgp",
"public_ip_address": "161.17.101.10",
"private_ip_address": "192.168.10.6",
"tenant_id": "8b7e35ad379141fc9df3e178bd64f55c",
"create_time": "2015-07-16 04:23:03",
"bandwidth_id": "a79fd11a-047b-4f5b-8f12-99c178cc780a",
"bandwidth_share_type": "PER",
"bandwidth_size": 5
}
]
}
}
"""
uri = "/v1/%s/publicips" % project_id
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def create_public_ip(self, project_id, public_ip, bandwidth):
"""
:param project_id: string
:param public_ip: dict
:param bandwidth: dict
:return: dict
{
"body"{
"publicip": {
"id": "f588ccfa-8750-4d7c-bf5d-2ede24414706",
"status": "PENDING_CREATE",
"type": "5_bgp",
"public_ip_address": "161.17.101.7",
"tenant_id": "8b7e35ad379141fc9df3e178bd64f55c",
"create_time": "2015-07-16 04:10:52",
"bandwidth_size": 0
}
}
"status" : 200
}
"""
uri = "/v1/%s/publicips" % project_id
request_body_dict = dict()
request_body_dict["publicip"] = public_ip
request_body_dict["bandwidth"] = bandwidth
request_body_string = json.dumps(request_body_dict)
return self.post(uri, request_body_string)
def delete_public_ip(self, project_id, public_ip_id):
"""
:param project_id: string
:param public_ip_id: string
:return: dict
{
"status" : 204
}
"""
uri = "/v1/%s/publicips/%s" % (project_id, public_ip_id)
return self.delete(uri)
def list_security_groups(self, project_id, opts=None):
"""
:param project_id: string
:param opts: dict
:return: dict
{
"security_groups": [
{
"id": "16b6e77a-08fa-42c7-aa8b-106c048884e6",
"name": "qq",
"vpc_id": "3ec3b33f-ac1c-4630-ad1c-7dba1ed79d85",
"security_group_rules": [
{
"direction": "egress",
"ethertype": "IPv4",
"id": "369e6499-b2cb-4126-972a-97e589692c62",
"security_group_id": "16b6e77a-08fa-42c7-aa8b-106c048884e6"
},
{
"direction": "ingress",
"ethertype": "IPv4",
"id": "0222556c-6556-40ad-8aac-9fd5d3c06171",
"remote_group_id": "16b6e77a-08fa-42c7-aa8b-106c048884e6",
"security_group_id": "16b6e77a-08fa-42c7-aa8b-106c048884e6"
}
]
},
{
"id": "9c0f56be-a9ac-438c-8c57-fce62de19419",
"name": "default",
"vpc_id": "13551d6b-755d-4757-b956-536f674975c0",
"security_group_rules": [
{
"direction": "egress",
"ethertype": "IPv4",
"id": "95479e0a-e312-4844-b53d-a5e4541b783f",
"security_group_id": "9c0f56be-a9ac-438c-8c57-fce62de19419"
},
{
"direction": "ingress",
"ethertype": "IPv4",
"id": "0c4a2336-b036-4fa2-bc3c-1a291ed4c431",
"remote_group_id": "9c0f56be-a9ac-438c-8c57-fce62de19419",
"security_group_id": "9c0f56be-a9ac-438c-8c57-fce62de19419"
}
]
}
]
}
"""
uri = "/v1/%s/security-groups" % project_id
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def create_security_group_rule(self, project_id, security_group_id, direction,
ethertype, protocol=None, port_range_min=None,
port_range_max = None, remote_ip_prefix = None,
remote_group_id = None):
"""
:param project_id:
:param security_group_id:
:param direction:
:param ethertype:
:param protocol:
:param port_range_min:
:param port_range_max:
:param remote_ip_prefix:
:param remote_group_id:
:return:
{
"security_group_rule":{
"direction":"ingress",
"ethertype":"IPv4",
"id":"2bc0accf-312e-429a-956e-e4407625eb62",
"port_range_max":80,
"port_range_min":80,
"protocol":"tcp",
"remote_group_id":"85cc3048-abc3-43cc-89b3-377341426ac5",
"remote_ip_prefix":null,
"security_group_id":"a7734e61-b545-452d-a3cd-0189cbd9747a",
"tenant_id":"e4f50856753b4dc6afee5fa6b9b6c550"
}
}
"""
uri = "/v1/%s/security-group-rules" % project_id
request_body_dict = dict()
security_group_rule = dict()
security_group_rule["security_group_id"] = security_group_id
security_group_rule["direction"] = direction
security_group_rule["ethertype"] = ethertype
if protocol:
security_group_rule["protocol"] = protocol
if port_range_min:
security_group_rule["port_range_min"] = port_range_min
if port_range_max:
security_group_rule["port_range_max"] = port_range_max
if remote_ip_prefix:
security_group_rule["remote_ip_prefix"] = remote_ip_prefix
if remote_group_id:
security_group_rule["remote_group_id"] = remote_group_id
request_body_dict["security_group_rule"] = security_group_rule
request_body_string = json.dumps(request_body_dict)
return self.post(uri, request_body_string)
def list_server_nics(self, project_id, server_id):
"""
:param project_id:
:param server_id:
:return:
{
"interfaceAttachments": [
{
"port_state": "ACTIVE",
"fixed_ips": [
{
"subnet_id": "f8a6e8f8-c2ec-497c-9f23-da9616de54ef",
"ip_address": "192.168.1.3"
}
],
"net_id": "3cb9bc59-5699-4588-a4b1-b87f96708bc6",
"port_id": "ce531f90-199f-48c0-816c-13e38010b442",
"mac_addr": "fa:16:3e:4c:2c:30"
}
]
}
"""
uri = "/v2/%s/servers/%s/os-interface" % (project_id, server_id)
return self.get(uri)
def bind_public_ip(self, project_id, public_ip_id, port_id):
"""
:param project_id:
:param public_id:
:param port_id:
:return:
{
"publicip": {
"id": "f588ccfa-8750-4d7c-bf5d-2ede24414706",
"status": "PENDING_UPDATE",
"type": "5_bgp",
"public_ip_address": "161.17.101.7",
"port_id": "f588ccfa-8750-4d7c-bf5d-2ede24414706",
"tenant_id": "8b7e35ad379141fc9df3e178bd64f55c",
"create_time": "2015-07-16 04:10:52",
"bandwidth_size": 6
}
}
"""
uri = "/v1/%s/publicips/%s" % (project_id, public_ip_id)
request_body_dict = dict()
publicip = dict()
publicip["port_id"] = port_id
request_body_dict["publicip"] = publicip
request_body_string = json.dumps(request_body_dict)
return self.put(uri, request_body_string)
    def update_port(self, port_id, name=None, security_groups=None,
                    allowed_address_pairs=None, extra_dhcp_opts=None):
        """
        Update attributes of a network port.
        PUT /v1/ports/{port_id} with body {"port": {...}} containing only the
        arguments that were supplied (falsy arguments are skipped, so an empty
        string/list cannot be used to clear a field).
        :param port_id: string
        :param name: string or None, new port name
        :param security_groups: list or None, security group ids
        :param allowed_address_pairs: list or None; use 1.1.1.1/0 as mac_ip
            to release a mac-ip binding
        :param extra_dhcp_opts: list or None
        :return: response dict containing the updated 'port' (id, name,
            status, fixed_ips, mac_address, security_groups, ...)
        """
        uri = "/v1/ports/%s" % (port_id)
        request_body_dict = dict()
        port = dict()
        if name:
            port["name"] = name
        if security_groups:
            port["security_groups"] = security_groups
        if allowed_address_pairs:
            port["allowed_address_pairs"] = allowed_address_pairs
        if extra_dhcp_opts:
            port["extra_dhcp_opts"] = extra_dhcp_opts
        request_body_dict["port"] = port
        request_body_string = json.dumps(request_body_dict)
return self.put(uri, request_body_string) | {
"repo_name": "hgqislub/hybird-orchard",
"path": "code/hwcloud/hws_service/vpc_service.py",
"copies": "1",
"size": "18665",
"license": "apache-2.0",
"hash": -3385705767318458400,
"line_mean": 34.3522727273,
"line_max": 89,
"alpha_frac": 0.4543798553,
"autogenerated": false,
"ratio": 3.4603262884686687,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44147061437686685,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import json
from hwcloud.hws_service import HWSService
from hwcloud.hws_service import uri as hws_uri
class ECSService(HWSService):
    def __init__(self, ak, sk, region, protocol, host, port):
        # Register this client with service type 'ECS' on the shared HWS base.
        super(ECSService, self).__init__(ak, sk, 'ECS', region, protocol, host, port)
def list(self, project_id, opts=None):
"""
:param project_id: string
:param opts: dict
:return:
{
u'body': {
u'servers': [{
u'id': u'817187bd-7691-408b-a78e-0bb0e8407cd6',
u'links': [{
u'href': u'https: //compute.region.cnnorth1.hwclouds.com/v2/91d957f0b92d48f0b184c26975d2346e/servers/817187bd-7691-408b-a78e-0bb0e8407cd6',
u'rel': u'self'
},
{
u'href': u'https: //compute.region.cnnorth1.hwclouds.com/91d957f0b92d48f0b184c26975d2346e/servers/817187bd-7691-408b-a78e-0bb0e8407cd6',
u'rel': u'bookmark'
}],
u'name': u's_server_01'
}]
},
u'status': 200
}
"""
uri = hws_uri.ECS.LIST_SERVERS.format(project_id=project_id)
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def list_detail(self, project_id, opts=None):
uri = hws_uri.ECS.LIST_DETAILS.format(project_id=project_id)
if opts:
str_opts = self.convertDictOptsToString(opts)
uri = '?'.join([uri, str_opts])
return self.get(uri)
def get_detail(self, project_id, server_id):
"""
:param project_id:
:param server_id:
:return:
{
u'body': {
u'server': {
u'OS-EXT-STS: task_state': None,
u'addresses': {
u'bd73a684-12dc-4721-a548-be1ac3599f37': [{
u'OS-EXT-IPS-MAC: mac_addr': u'fa: 16: 3e: c6: bf: 75',
u'version': 4,
u'addr': u'172.28.48.27',
u'OS-EXT-IPS: type': u'fixed'
},
{
u'OS-EXT-IPS-MAC: mac_addr': u'fa: 16: 3e: bd: d2: ef',
u'version': 4,
u'addr': u'172.28.0.29',
u'OS-EXT-IPS: type': u'fixed'
},
{
u'OS-EXT-IPS-MAC: mac_addr': u'fa: 16: 3e: fa: 7d: 93',
u'version': 4,
u'addr': u'172.28.48.28',
u'OS-EXT-IPS: type': u'fixed'
}]
},
u'links': [{
u'href': u'https: //compute.region.cnnorth1.hwclouds.com/v2/91d957f0b92d48f0b184c26975d2346e/servers/6058fb3e-4eec-419d-ad0b-810876f6ac76',
u'rel': u'self'
},
{
u'href': u'https: //compute.region.cnnorth1.hwclouds.com/91d957f0b92d48f0b184c26975d2346e/servers/6058fb3e-4eec-419d-ad0b-810876f6ac76',
u'rel': u'bookmark'
}],
u'image': u'',
u'OS-EXT-STS: vm_state': u'active',
u'OS-EXT-SRV-ATTR: instance_name': u'instance-0001bb38',
u'OS-SRV-USG: launched_at': u'2016-04-23T06: 20: 05.000000',
u'flavor': {
u'id': u'c1.large',
u'links': [{
u'href': u'https: //compute.region.cnnorth1.hwclouds.com/91d957f0b92d48f0b184c26975d2346e/flavors/c1.large',
u'rel': u'bookmark'
}]
},
u'id': u'6058fb3e-4eec-419d-ad0b-810876f6ac76',
u'security_groups': [{
u'name': u'bd73a684-12dc-4721-a548-be1ac3599f37'
},
{
u'name': u'bd73a684-12dc-4721-a548-be1ac3599f37'
},
{
u'name': u'bd73a684-12dc-4721-a548-be1ac3599f37'
}],
u'user_id': u'365cc2ee5b064ffd8bcec2e4fe1af6c7',
u'OS-DCF: diskConfig': u'MANUAL',
u'accessIPv4': u'',
u'accessIPv6': u'',
u'progress': 0,
u'OS-EXT-STS: power_state': 1,
u'OS-EXT-AZ: availability_zone': u'cn-north-1a',
u'config_drive': u'',
u'status': u'ACTIVE',
u'updated': u'2016-04-23T06: 20: 08Z',
u'hostId': u'1ff2a35f6e2c07cf3a28a47042837ebeeb4a602ce71c2c5df3e2d116',
u'OS-EXT-SRV-ATTR: host': u'pod1a.cnnorth1',
u'OS-SRV-USG: terminated_at': None,
u'key_name': None,
u'OS-EXT-SRV-ATTR: hypervisor_hostname': u'pod1a.cnnorth1',
u'name': u'lW9y5hNY',
u'created': u'2016-04-23T06: 18: 55Z',
u'tenant_id': u'91d957f0b92d48f0b184c26975d2346e',
u'os-extended-volumes: volumes_attached': [{
u'id': u'5686a9f0-7715-4a41-ab45-9acc2d3e6f84'
}],
u'metadata': {
u'metering.image_id': u'8c1ebcf4-4798-423d-81c0-96b679534539',
u'metering.imagetype': u'private',
u'metering.resourcespeccode': u'c1.large.linux',
u'metering.cloudServiceType': u'hws.service.type.ec2',
u'image_name': u'hybridvm-base-423_02',
u'os_bit': u'64',
u'metering.resourcetype': u'1',
u'vpc_id': u'bd73a684-12dc-4721-a548-be1ac3599f37',
u'os_type': u'Linux',
u'charging_mode': u'0'
}
}
},
u'status': 200
}
"""
uri = hws_uri.ECS.GET_DETAIL.format(project_id=project_id, server_id=server_id)
return self.get(uri)
def create_server(self, project_id, image_ref, flavor_ref, name, vpcid, nics_subnet_list, root_volume_type,
                  personality_path=None, personality_contents=None, adminPass=None, public_ip_id=None, count=None,
                  data_volumes=None, security_groups=None, availability_zone=None, key_name=None):
    """
    Create one or more ECS servers.

    Rest method: POST
    Uri for create server: /v1/{project_id}/cloudservers
    Request body of create server in hws is as following:
        {
            "server": {
                "availability_zone": "az1.dc1",
                "name": "newserver",
                "imageRef": "imageid",
                "root_volume": {"volumetype": "SATA"},
                "data_volumes": [{"volumetype": "SATA", "size": 100}],
                "flavorRef": "1",
                "personality": [{"path": "/etc/banner.txt", "contents": "ICAgICAgDQoiQmFjaA=="}],
                "vpcid": "vpcid",
                "security_groups": [{"id": "securitygroupsid"}],
                "nics": [{"subnet_id": "subnetid"}],
                "publicip": {"id": "publicipid"},
                "key_name": "keyname",
                "adminPass": "password",
                "count": 1
            }
        }
    :param project_id: string
    :param image_ref: string, image id
    :param flavor_ref: string, flavor id
    :param name: string
    :param vpcid: string
    :param nics_subnet_list: list of subnet_id, ['subnet_id_01', 'subnet_id_02']
    :param root_volume_type: string
    :param personality_path: string, injected-file path
    :param personality_contents: string, base64-encoded injected-file contents
    :param adminPass: string
    :param public_ip_id: string, id of an existing public ip to bind
    :param count: int
    :param data_volumes: list of dicts, e.g. [{"volumetype": "SATA", "size": 100}]
    :param security_groups: list of security group id, e.g. ['sg_id_01', 'sg_id_02']
    :param availability_zone: string
    :param key_name: string
    :return: dict, e.g.
        {
            u'body': {u'job_id': u'8aace0c851b0a3c10151eca2b4183764'},
            u'status': 200
        }
    """
    uri = hws_uri.ECS.CREATE_SERVER.format(project_id=project_id)
    # Mandatory attributes first; optional ones are added only when supplied
    # so the request body stays minimal.
    request_server_body = {
        'imageRef': image_ref,
        'flavorRef': flavor_ref,
        'name': name,
        'vpcid': vpcid,
    }
    if adminPass:
        request_server_body['adminPass'] = adminPass
    if count:
        request_server_body['count'] = count
    if personality_path and personality_contents:
        request_server_body['personality'] = [{'path': personality_path,
                                               'contents': personality_contents}]
    if nics_subnet_list:
        request_server_body['nics'] = [{'subnet_id': subnet_id}
                                       for subnet_id in nics_subnet_list]
    if public_ip_id:
        # BUG FIX: the key used to be the builtin ``id`` function (no quotes),
        # which made json.dumps raise TypeError whenever public_ip_id was set.
        request_server_body['publicip'] = {'id': public_ip_id}
    if root_volume_type:
        request_server_body['root_volume'] = {'volumetype': root_volume_type}
    if data_volumes:
        request_server_body['data_volumes'] = data_volumes
    if security_groups:
        request_server_body['security_groups'] = [{'id': security_group_id}
                                                  for security_group_id in security_groups]
    if availability_zone:
        request_server_body['availability_zone'] = availability_zone
    if key_name:
        request_server_body['key_name'] = key_name
    request_body_string = json.dumps({'server': request_server_body})
    return self.post(uri, request_body_string)
def list_flavors(self, project_id):
    """List all ECS flavors visible to the project.

    :param project_id: string
    :return: dict — {'body': {'flavors': [...]}, 'status': 200}; each flavor
        entry carries id/name/vcpus/ram/disk plus 'os_extra_specs' such as
        'hws:performancetype' and 'hws:availablezone'.
    """
    return self.get(hws_uri.ECS.LIST_FLAVOR.format(project_id=project_id))
def delete_server(self, project_id, server_id_list, delete_public_ip, delete_volume):
    """Batch-delete servers, optionally releasing their public ips and volumes.

    Request body shape:
        {
            "servers": [{"id": "..."}],
            "delete_publicip": bool,
            "delete_volume": bool
        }

    :param project_id: string, project id
    :param server_id_list: list, e.g. [server_id, ...]
    :param delete_public_ip: boolean
    :param delete_volume: boolean
    :return: response dict from the API
    """
    uri = hws_uri.ECS.DELETE_SERVER.format(project_id=project_id)
    payload = {
        "servers": [{"id": server_id} for server_id in server_id_list],
        "delete_publicip": delete_public_ip,
        "delete_volume": delete_volume,
    }
    return self.post(uri, json.dumps(payload))
def stop_server(self, project_id, server_id):
    """Stop a single server via the 'os-stop' action."""
    uri = hws_uri.ECS.STOP_SERVER.format(project_id=project_id, server_id=server_id)
    return self.post(uri, json.dumps({'os-stop': {}}))
def stop_servers(self, project_id, servers_list):
    # TODO: batch stop is not implemented yet — this stub makes no API call.
    pass
def start_server(self, project_id, server_id):
    """Start a single server via the 'os-start' action."""
    uri = hws_uri.ECS.START_SERVER.format(project_id=project_id, server_id=server_id)
    body = json.dumps({"os-start": {}})
    return self.post(uri, body)
def reboot_hard(self, project_id, server_id):
    """Hard-reboot (power-cycle) the server."""
    reboot_type = "HARD"
    return self.reboot(project_id, server_id, reboot_type)
def reboot_soft(self, project_id, server_id):
    """Soft-reboot (guest OS restart) the server."""
    reboot_type = "SOFT"
    return self.reboot(project_id, server_id, reboot_type)
def reboot(self, project_id, server_id, type):
    """Reboot a server.

    :param project_id: string
    :param server_id: string
    :param type: string, "SOFT" or "HARD"
    :return: response dict from the API
    """
    uri = hws_uri.ECS.REBOOT_SERVER.format(project_id=project_id, server_id=server_id)
    body = json.dumps({"reboot": {"type": type}})
    return self.post(uri, body)
def attach_volume(self, project_id, server_id, volume_id, device_name):
    """Attach a volume to a server at the given device name."""
    # NOTE: 'ATTACHE_VOLUME' is the (misspelled) constant name in hws_uri.
    uri = hws_uri.ECS.ATTACHE_VOLUME.format(project_id=project_id, server_id=server_id)
    attachment = {'volumeId': volume_id, 'device': device_name}
    return self.post(uri, json.dumps({'volumeAttachment': attachment}))
def detach_volume(self, project_id, server_id, detachment_id):
    """Detach the given volume attachment from a server."""
    uri = hws_uri.ECS.DETACH_VOLUME.format(project_id=project_id,
                                           server_id=server_id,
                                           detachment_id=detachment_id)
    return self.delete(uri)
def get_volume_list(self, project_id, server_id):
    """List the volumes attached to a server."""
    uri = hws_uri.ECS.GET_SERVER_VOLUME_LIST.format(project_id=project_id,
                                                    server_id=server_id)
    return self.get(uri)
| {
"repo_name": "nash-x/hws",
"path": "hwcloud/hws_service/ecs_service.py",
"copies": "1",
"size": "16467",
"license": "apache-2.0",
"hash": -8155163708523457000,
"line_mean": 36.7683486239,
"line_max": 163,
"alpha_frac": 0.4723993441,
"autogenerated": false,
"ratio": 3.719674723288909,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46920740673889094,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import json
from hwcloud.hws_service import HWSService
from hwcloud.hws_service.uri import EVS as evs_uri
class EVSService(HWSService):
    """Client for the HWS Elastic Volume Service (EVS)."""

    def __init__(self, ak, sk, region, protocol, host, port):
        super(EVSService, self).__init__(ak, sk, 'EVS', region, protocol, host, port)

    def list(self, project_id, opts=None):
        """List volumes of a project.

        :param project_id: string
        :param opts: optional dict of filters, appended as a query string
        :return: response dict from the API
        """
        uri = evs_uri.LIST.format(project_id=project_id)
        if opts:
            uri = '?'.join([uri, self.convertDictOptsToString(opts)])
        return self.get(uri)

    def create_volume(self, project_id, availability_zone, size, volume_type,
                      backup_id=None, description=None, name=None, imageRef=None, count=None):
        """Create one or more volumes.

        Request body shape:
            {"volume": {"availability_zone": ..., "size": ..., "volume_type": ...,
                        "backup_id"/"description"/"name"/"imageRef"/"count": optional}}

        :param project_id: string
        :param availability_zone: string, e.g. 'az1.dc1'
        :param size: int, size in GB
        :param volume_type: string, e.g. 'SSD'
        :param backup_id: string, backup to restore from
        :param description: string
        :param name: string
        :param imageRef: string, image to create the volume from
        :param count: int, number of volumes
        :return: dict containing a 'job_id' on success, or an 'error' dict.
            Poll the job for status RUNNING/SUCCESS/FAIL; on success the job
            'entities' carry the created 'volume_id', on failure 'fail_reason'
            and 'error_code' (e.g. u'EVS.5400') are populated.
        """
        uri = evs_uri.CREATE_VOLUME.format(project_id=project_id)
        volume = {
            'availability_zone': availability_zone,
            'size': size,
            'volume_type': volume_type,
        }
        # Optional attributes are only sent when truthy, matching the API's
        # expectation of a minimal body.
        for key, value in (('backup_id', backup_id),
                           ('description', description),
                           ('name', name),
                           ('imageRef', imageRef),
                           ('count', count)):
            if value:
                volume[key] = value
        return self.post(uri, json.dumps({'volume': volume}))

    def delete_volume(self, project_id, volume_id):
        """DELETE /v2/{tenant_id}/cloudvolumes/{volume_id}"""
        uri = evs_uri.DELETE_VOLUME.format(project_id=project_id, volume_id=volume_id)
        return self.delete(uri)

    def get_volume_detail(self, project_id, volume_id):
        """Fetch the details of a single volume."""
        uri = evs_uri.GET_VOLUME_DETAIL.format(project_id=project_id, volume_id=volume_id)
        return self.get(uri)
"repo_name": "nash-x/hws",
"path": "hwcloud/hws_service/evs_service.py",
"copies": "1",
"size": "4569",
"license": "apache-2.0",
"hash": 1625309150355372800,
"line_mean": 29.4666666667,
"line_max": 138,
"alpha_frac": 0.4909170497,
"autogenerated": false,
"ratio": 3.583529411764706,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45744464614647057,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import json
from hwcloud.hws_service import HWSService
from hwcloud.hws_service.uri import IMS as ims_uri
class IMSService(HWSService):
    """Client for the HWS Image Management Service (IMS)."""

    def __init__(self, ak, sk, region, protocol, host, port):
        super(IMSService, self).__init__(ak, sk, 'IMS', region, protocol, host, port)

    def list(self, project_id):
        """List images.

        NOTE: ``project_id`` is accepted for interface symmetry but the IMS
        LIST uri does not use it.
        """
        return self.get(ims_uri.LIST)

    def create_image(self, name, description, instance_id=None, backup_id=None):
        """Create an image from an instance OR from a backup.

        POST /v2/cloudimages/action with either:
            {"name": ..., "description": ..., "instance_id": ...}
        or:
            {"name": ..., "description": ..., "backup_id": ...}

        :param name: string, image name
        :param description: string
        :param instance_id: string, source server id (mutually exclusive use)
        :param backup_id: string, source backup id
        :return: response dict from the API
        """
        body = {'name': name, 'description': description}
        if instance_id:
            body['instance_id'] = instance_id
        if backup_id:
            body['backup_id'] = backup_id
        return self.post(ims_uri.CREATE_IMAGE, json.dumps(body))

    def delete_image(self, image_id):
        """Delete the given image."""
        return self.delete(ims_uri.DELETE_IMAGE.format(image_id=image_id))
"repo_name": "nash-x/hws",
"path": "hwcloud/hws_service/ims_service.py",
"copies": "1",
"size": "1536",
"license": "apache-2.0",
"hash": 3991511812907292000,
"line_mean": 27.462962963,
"line_max": 85,
"alpha_frac": 0.5885416667,
"autogenerated": false,
"ratio": 3.375824175824176,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9458120301148374,
"avg_score": 0.0012491082751602792,
"num_lines": 54
} |
__author__ = 'Administrator'
import json
from hwcloud.hws_service import HWSService
from hwcloud.hws_service.uri import VBS as vbs_uri
class VBSService(HWSService):
    """Client for the HWS Volume Backup Service (VBS)."""

    def __init__(self, ak, sk, region, protocol, host, port):
        # NOTE(review): the service name handed to HWSService is 'VPC', not
        # 'VBS', unlike the sibling services (EVS/IMS pass their own names).
        # Preserved as-is — confirm whether VBS requests are really
        # signed/routed under the 'VPC' service name before changing it.
        super(VBSService, self).__init__(ak, sk, 'VPC', region, protocol, host, port)

    def create_backup(self, project_id, volume_id, name=None, description=None):
        """Create a backup of a volume.

        :param project_id: string
        :param volume_id: string, volume to back up
        :param name: string, optional backup name
        :param description: string, optional
        :return: dict — {'body': {'job_id': ...}, 'status': 200}. Poll the job
            for status; a failed job carries 'fail_reason' and 'error_code'
            (e.g. 'VolumeBackup.0064'), a finished one carries the created
            'backup_id'/'snapshot_id' in its 'entities'.
        """
        uri = vbs_uri.COMPOSITE_INTERFACE_VBS_PREFIX.format(project_id=project_id)
        backup = {'volume_id': volume_id}
        if name:
            backup['name'] = name
        if description:
            backup['description'] = description
        return self.post(uri, json.dumps({'backup': backup}))

    def delete_backup(self, project_id, backup_id):
        """Delete the given backup."""
        uri = vbs_uri.DELETE_BACKUP.format(project_id=project_id, backup_id=backup_id)
        return self.delete(uri)
| {
"repo_name": "nash-x/hws",
"path": "hwcloud/hws_service/vbs_service.py",
"copies": "1",
"size": "2279",
"license": "apache-2.0",
"hash": 5480833994361490000,
"line_mean": 31.5571428571,
"line_max": 102,
"alpha_frac": 0.4923211935,
"autogenerated": false,
"ratio": 3.9773123909249564,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9890984697380626,
"avg_score": 0.01572977740886603,
"num_lines": 70
} |
__author__ = 'Administrator'
import json
from hwcloud.hws_service import HWSService
from hwcloud.hws_service.uri import VPC as vpc_uri
class VPCService(HWSService):
    """Client for the HWS Virtual Private Cloud (VPC) service."""

    def __init__(self, ak, sk, region, protocol, host, port):
        super(VPCService, self).__init__(ak, sk, 'VPC', region, protocol, host, port)

    def _get_with_opts(self, uri, opts):
        # Shared GET helper: append optional filters as a query string.
        if opts:
            uri = '?'.join([uri, self.convertDictOptsToString(opts)])
        return self.get(uri)

    def list_vpc(self, project_id, opts=None):
        """List VPCs of a project.

        :param project_id: string
        :param opts: optional dict of query filters
        :return: dict — {'body': {'vpcs': [{'status', 'cidr', 'id', 'name'}]},
            'status': 200}
        """
        return self._get_with_opts(vpc_uri.LIST.format(project_id=project_id), opts)

    def list_vpc_detail(self, project_id, vpc_id):
        """Fetch one VPC.

        :return: dict — {'body': {'vpc': {'status', 'cidr', 'id', 'name'}},
            'status': 200}
        """
        uri = vpc_uri.GET_VPC_DETAIL.format(project_id=project_id, vpc_id=vpc_id)
        return self.get(uri)

    def list_subnet(self, project_id, opts=None):
        """List subnets of a project.

        :param project_id: string
        :param opts: optional dict of query filters
        :return: dict — {'body': {'subnets': [...]}, 'status': 200}; each
            subnet carries status/name/dhcp_enable/availability_zone/
            primary_dns/gateway_ip/vpc_id/cidr/secondary_dns/id.
        """
        return self._get_with_opts(vpc_uri.LIST_SUBNETS.format(project_id=project_id), opts)

    def list_subnet_detail(self, project_id, subnet_id):
        """Fetch one subnet.

        :return: dict — {'body': {'subnet': {...}}, 'status': 200}
        """
        uri = vpc_uri.GET_SUBNET_DETAIL.format(project_id=project_id, subnet_id=subnet_id)
        return self.get(uri)

    def create_subnet(self, project_id, name, cidr, availability_zone, vpc_id,
                      gateway_ip=None, dhcp_enable=None, primary_dns=None, secondary_dns=None):
        """Create a subnet inside a VPC.

        :param project_id: string
        :param name: string
        :param cidr: string, e.g. '172.21.0.0/24'
        :param availability_zone: string, e.g. 'cn-north-1a'
        :param vpc_id: string
        :param gateway_ip: string, optional
        :param dhcp_enable: string, optional
        :param primary_dns: string, optional
        :param secondary_dns: string, optional
        :return: response dict from the API
        """
        uri = vpc_uri.CREATE_SUBNET.format(project_id=project_id)
        subnet = {
            "name": name,
            "cidr": cidr,
            "availability_zone": availability_zone,
            "vpc_id": vpc_id,
        }
        # Optional attributes only go into the body when supplied.
        for key, value in (("gateway_ip", gateway_ip),
                           ("dhcp_enable", dhcp_enable),
                           ("primary_dns", primary_dns),
                           ("secondary_dns", secondary_dns)):
            if value:
                subnet[key] = value
        return self.post(uri, json.dumps({"subnet": subnet}))
| {
"repo_name": "nash-x/hws",
"path": "hwcloud/hws_service/vpc_service.py",
"copies": "1",
"size": "6077",
"license": "apache-2.0",
"hash": 8203170583980075000,
"line_mean": 33.5284090909,
"line_max": 95,
"alpha_frac": 0.4670067468,
"autogenerated": false,
"ratio": 3.3723640399556047,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9309676789409478,
"avg_score": 0.005938799469225236,
"num_lines": 176
} |
__author__ = 'Administrator'
import sqlite3
import sys
import os
import traceback
import time
#from hwcloud import LOG
from heat.openstack.common import log as logging
LOG = logging.getLogger(__name__)
TIMES = 50
INTERVAL = 0.1
def retry(times, interval):
    """Decorator factory: retry a DB operation on any exception.

    The wrapped callable is attempted up to ``times`` additional times
    (``times + 1`` calls in total); each failure is logged and followed by an
    ``interval``-second pause. When the retry budget is exhausted, the last
    exception is re-raised.
    """
    def _wrapper(f):
        def __wrapper(*args, **kwargs):
            attempt = 0
            while True:
                try:
                    return f(*args, **kwargs)
                except Exception as e:
                    LOG.debug('Do DB action Exception: %s' % traceback.format_exc())
                    if attempt >= times:
                        # Retry budget exhausted — surface the failure.
                        LOG.error('Do DB Exception: %s' % traceback.format_exc())
                        raise e
                    attempt += 1
                    time.sleep(interval)
                    LOG.error('Start to retry to do db action, TIME: %s' % attempt)
        return __wrapper
    return _wrapper
def close_connection():
    """Decorator factory: always close the DB connection after the call.

    The wrapped method's result (or exception) propagates unchanged;
    ``self.close()`` runs unconditionally afterwards.
    """
    def _wrapper(func):
        def __wrapper(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            finally:
                self.close()
        return __wrapper
    return _wrapper
class DatabaseManager(object):
    """SQLite-backed store of id mappings between cascading (local) and
    cascaded (HWS-side) resources: servers, server names, images, flavors
    and volumes.

    A single connection is shared through the class attribute ``conn``;
    every public DB method is wrapped with ``@retry`` (transient-failure
    retries) and ``@close_connection`` (the connection is closed after
    each call).
    """

    # Shared sqlite3 connection; opened lazily by connect(), reset by close().
    conn = None

    def __init__(self):
        """Prepare the SQL templates and create the DB file if it is missing."""
        self.HWS_GATEWAY_DB = 'hws_gateway.db'
        self.DB_STORE_PATH = "/home/sqlite_db"
        self.CREATE_TABLE_SERVER_ID_MAPPING = \
            '''CREATE TABLE server_id_mapping(cascading_server_id text, cascaded_server_id text)'''
        self.INSERT_SERVER_ID_MAPPING = \
            '''INSERT INTO server_id_mapping(cascading_server_id, cascaded_server_id) VALUES (?,?)'''
        self.CREATE_TABLE_SERVER_ID_NAME_MAPPING = \
            '''CREATE TABLE server_id_name_mapping(cascading_server_id text, cascaded_server_name text)'''
        self.INSERT_SERVER_ID_NAME_MAPPING = \
            '''INSERT INTO server_id_name_mapping(cascading_server_id, cascaded_server_name) VALUES (?,?)'''
        self.CREATE_TABLE_IMAGE_ID_MAPPING = \
            '''CREATE TABLE image_id_mapping(cascading_image_id text, cascaded_image_id text)'''
        self.INSERT_IMAGE_ID_MAPPING = \
            '''INSERT INTO image_id_mapping(cascading_image_id, cascaded_image_id) VALUES (?,?)'''
        self.CREATE_TABLE_FLAVOR_ID_MAPPING = \
            '''CREATE TABLE flavor_id_mapping(cascading_flavor_id text, cascaded_flavor_id text)'''
        self.INSERT_TABLE_FLAVOR_ID_MAPPING = \
            '''INSERT INTO flavor_id_mapping(cascading_flavor_id, cascaded_flavor_id) VALUES (?,?)'''
        self.CREATE_TABLE_VOLUME_MAPPING = \
            '''CREATE TABLE volume_mapping(cascading_volume_id text, cascaded_volume_id text, cascaded_backup_id text, cascaded_image_id text)'''
        self.INSERT_VOLUME_MAPPING = \
            '''INSERT INTO volume_mapping(cascading_volume_id, cascaded_volume_id, cascaded_backup_id, cascaded_image_id) VALUES (?, ?, ?, ?)'''
        db_full_name = self.get_hws_gateway_db_full_name()
        if not os.path.isfile(db_full_name):
            self.init_database()

    def get_current_dir(self):
        """Return the directory containing this module."""
        return os.path.split(os.path.realpath(__file__))[0]

    def get_hws_gateway_db_full_name(self):
        """Return the absolute path of the gateway database file."""
        return os.path.join(self.DB_STORE_PATH, self.HWS_GATEWAY_DB)

    def connect(self):
        """Open (or reuse) the shared sqlite3 connection."""
        if DatabaseManager.conn is None:
            DatabaseManager.conn = sqlite3.connect(self.get_hws_gateway_db_full_name())
        return DatabaseManager.conn

    def close(self):
        """Close and forget the shared connection, if open."""
        if DatabaseManager.conn:
            DatabaseManager.conn.close()
            DatabaseManager.conn = None

    def commit(self):
        """Commit the current transaction, if a connection is open."""
        if DatabaseManager.conn:
            DatabaseManager.conn.commit()

    def init_database(self):
        """Create every mapping table that does not exist yet."""
        self.create_table_server_id_mapping()
        self.create_table_server_id_name_mapping()
        self.create_table_image_id_mapping()
        self.create_table_flavor_id_mapping()
        self.create_table_volume_mapping()

    # -- internal helpers -------------------------------------------------

    def _execute_commit(self, sql, params):
        # Run one parameterized statement and commit it.
        cursor = self.connect().cursor()
        cursor.execute(sql, params)
        self.commit()

    def _fetch_single(self, sql, params):
        # Run a parameterized one-row SELECT; return column 0 as str, or None.
        cursor = self.connect().cursor()
        cursor.execute(sql, params)
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    # -- server id mapping -------------------------------------------------

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_server_id_mapping(self, cascading_server_id, cascaded_server_id):
        """Record a cascading -> cascaded server id pair."""
        self._execute_commit(self.INSERT_SERVER_ID_MAPPING,
                             (cascading_server_id, cascaded_server_id))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_server_id_by_cascading_id(self, cascading_server_id):
        """Remove the server id mapping keyed by the cascading id."""
        self._execute_commit(
            "DELETE FROM server_id_mapping WHERE cascading_server_id = ?",
            (cascading_server_id,))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_server_id(self, cascading_id):
        """Return the cascaded server id for a cascading id, or None."""
        # Parameterized query (was %-interpolated: SQL-injection prone).
        return self._fetch_single(
            "SELECT cascaded_server_id FROM server_id_mapping "
            "WHERE cascading_server_id = ?", (cascading_id,))

    # -- server id/name mapping --------------------------------------------

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_server_id_name_mapping(self, cascading_server_id, cascaded_server_name):
        """Record a cascading server id -> cascaded server name pair."""
        self._execute_commit(self.INSERT_SERVER_ID_NAME_MAPPING,
                             (cascading_server_id, cascaded_server_name))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_server_name(self, cascading_server_id):
        """Return the cascaded server name for a cascading id, or None."""
        return self._fetch_single(
            "SELECT cascaded_server_name FROM server_id_name_mapping "
            "WHERE cascading_server_id = ?", (cascading_server_id,))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_server_id_name_by_cascading_id(self, cascading_server_id):
        """Remove the server name mapping keyed by the cascading id."""
        self._execute_commit(
            "DELETE FROM server_id_name_mapping WHERE cascading_server_id = ?",
            (cascading_server_id,))

    # -- image id mapping --------------------------------------------------

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_image_id_mapping(self, cascading_image_id, cascaded_image_id):
        """Record a cascading -> cascaded image id pair."""
        self._execute_commit(self.INSERT_IMAGE_ID_MAPPING,
                             (cascading_image_id, cascaded_image_id))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_image_id_mapping(self, cascading_image_id):
        """Remove the image id mapping keyed by the cascading id."""
        self._execute_commit(
            "DELETE FROM image_id_mapping WHERE cascading_image_id = ?",
            (cascading_image_id,))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_image_id(self, cascading_image_id):
        """Return the cascaded image id for a cascading id, or None."""
        return self._fetch_single(
            "SELECT cascaded_image_id FROM image_id_mapping "
            "WHERE cascading_image_id = ?", (cascading_image_id,))

    # -- flavor id mapping -------------------------------------------------

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_flavor_id_mapping(self, cascading_flavor_id, cascaded_flavor_id):
        """Record a cascading -> cascaded flavor id pair."""
        self._execute_commit(self.INSERT_TABLE_FLAVOR_ID_MAPPING,
                             (cascading_flavor_id, cascaded_flavor_id))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_flavor_id_mapping(self, cascading_flavor_id):
        """Remove the flavor id mapping keyed by the cascading id."""
        self._execute_commit(
            "DELETE FROM flavor_id_mapping WHERE cascading_flavor_id = ?",
            (cascading_flavor_id,))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_flavor_id(self, cascading_flavor_id):
        """Return the cascaded flavor id for a cascading id, or None."""
        return self._fetch_single(
            "SELECT cascaded_flavor_id FROM flavor_id_mapping "
            "WHERE cascading_flavor_id = ?", (cascading_flavor_id,))

    # -- volume mapping ----------------------------------------------------

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_volume_mapping(self, cascading_volume_id, cascaded_volume_id,
                           cascaded_backup_id=None, cascaded_image_id=None):
        """Record a volume mapping row (backup/image ids optional)."""
        self._execute_commit(self.INSERT_VOLUME_MAPPING,
                             (cascading_volume_id, cascaded_volume_id,
                              cascaded_backup_id, cascaded_image_id))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_volume_id(self, cascading_volume_id):
        """Return the cascaded volume id for a cascading id, or None."""
        return self._fetch_single(
            "SELECT cascaded_volume_id FROM volume_mapping "
            "WHERE cascading_volume_id = ?", (cascading_volume_id,))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_backup_in_volume_mapping(self, cascading_volume_id, cascaded_backup_id):
        """Set the cascaded backup id on the row keyed by cascading_volume_id.

        BUG FIX: the previous SQL updated ``cascading_volume_id`` (the key
        column) while filtering on ``cascaded_volume_id``, with the two
        parameters swapped — it corrupted the mapping key instead of
        recording the backup id.
        """
        self._execute_commit(
            "UPDATE volume_mapping SET cascaded_backup_id = ? "
            "WHERE cascading_volume_id = ?",
            (cascaded_backup_id, cascading_volume_id))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_image_in_volume_mapping(self, cascading_volume_id, cascaded_image_id):
        """Set the cascaded image id on the row keyed by cascading_volume_id.

        BUG FIX: same parameter/column swap as the backup variant — the old
        SQL overwrote ``cascading_volume_id`` with the image id.
        """
        self._execute_commit(
            "UPDATE volume_mapping SET cascaded_image_id = ? "
            "WHERE cascading_volume_id = ?",
            (cascaded_image_id, cascading_volume_id))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_volume_in_volume_mapping(self, cascading_volume_id, cascaded_volume_id):
        """Set the cascaded volume id on the row keyed by cascading_volume_id."""
        self._execute_commit(
            "UPDATE volume_mapping SET cascaded_volume_id = ? "
            "WHERE cascading_volume_id = ?",
            (cascaded_volume_id, cascading_volume_id))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_backup(self, cascading_volume_id):
        """Return the cascaded backup id for a cascading volume id, or None."""
        return self._fetch_single(
            "SELECT cascaded_backup_id FROM volume_mapping "
            "WHERE cascading_volume_id = ?", (cascading_volume_id,))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_backup_image(self, cascading_volume_id):
        """Return the cascaded image id for a cascading volume id, or None."""
        return self._fetch_single(
            "SELECT cascaded_image_id FROM volume_mapping "
            "WHERE cascading_volume_id = ?", (cascading_volume_id,))

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_volume_mapping(self, cascading_volume_id):
        """Remove the volume mapping keyed by the cascading id."""
        self._execute_commit(
            "DELETE FROM volume_mapping WHERE cascading_volume_id = ?",
            (cascading_volume_id,))

    # -- schema management -------------------------------------------------

    @retry(TIMES, INTERVAL)
    @close_connection()
    def drop_table(self, table_name):
        """Drop a table if it exists.

        Identifiers cannot be bound as parameters; ``table_name`` is only
        ever one of the internal table names below, never user input.
        """
        cursor = self.connect().cursor()
        cursor.execute('drop table if exists %s' % table_name)
        self.commit()

    def create_tables(self, create_table_sql_list):
        """Execute each CREATE TABLE statement in the list, then commit.

        :param create_table_sql_list: list of CREATE TABLE SQL strings.
        """
        cursor = self.connect().cursor()
        for create_table_sql in create_table_sql_list:
            cursor.execute(create_table_sql)
        self.commit()

    def drop_all_tables(self):
        """Drop every mapping table."""
        self.drop_table_server_id_mapping()
        self.drop_table_server_id_name_mapping()
        self.drop_table_image_id_mapping()
        self.drop_table_flavor_id_mapping()
        self.drop_table_volume_mapping()

    def create_table(self, create_table_sql):
        """Execute a single CREATE TABLE statement."""
        self.create_tables([create_table_sql])

    def drop_table_server_id_mapping(self):
        self.drop_table('server_id_mapping')

    def drop_table_server_id_name_mapping(self):
        self.drop_table('server_id_name_mapping')

    def drop_table_image_id_mapping(self):
        self.drop_table('image_id_mapping')

    def drop_table_flavor_id_mapping(self):
        self.drop_table('flavor_id_mapping')

    def drop_table_volume_mapping(self):
        self.drop_table('volume_mapping')

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_image_id_mapping(self):
        if not self.is_table_exist('image_id_mapping'):
            self.create_table(self.CREATE_TABLE_IMAGE_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_server_id_mapping(self):
        if not self.is_table_exist('server_id_mapping'):
            self.create_table(self.CREATE_TABLE_SERVER_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_server_id_name_mapping(self):
        if not self.is_table_exist('server_id_name_mapping'):
            self.create_table(self.CREATE_TABLE_SERVER_ID_NAME_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_flavor_id_mapping(self):
        if not self.is_table_exist('flavor_id_mapping'):
            self.create_table(self.CREATE_TABLE_FLAVOR_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_volume_mapping(self):
        if not self.is_table_exist('volume_mapping'):
            self.create_table(self.CREATE_TABLE_VOLUME_MAPPING)

    def is_table_exist(self, table_name):
        """Return True if the named table exists in the database."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT name FROM sqlite_master "
                       "WHERE type='table' AND name = ?", (table_name,))
        row = cursor.fetchone()
        if row:
            return True
        return False
def print_option():
    """Print CLI usage.

    Fixed to list every option the ``__main__`` dispatcher actually
    supports: the original help omitted ``init_db``, ``drop_db``,
    ``delete_image_mapping`` and ``--help``.
    """
    print('Support options:')
    print('    init_db')
    print('    drop_db')
    print('    add_image_mapping [CASCADING_IMAGE_ID] [CASCADED_IMAGE_ID]')
    print('    add_flavor_mapping [CASCADING_FLAVOR_ID] [CASCADED_FLAVOR_ID]')
    print('    add_server_mapping [CASCADING_SERVER_ID] [CASCADED_SERVER_ID]')
    print('    delete_image_mapping [CASCADING_IMAGE_ID]')
    print('    get_cascaded_image [CASCADING_IMAGE_ID]')
    print('    get_cascaded_flavor [CASCADING_FLAVOR_ID]')
    print('    get_cascaded_server [CASCADING_SERVER_ID]')
    print('    --help')
# Command-line entry point: dispatch on the first positional argument.
# Commands that take arguments silently do nothing when the argument
# count is wrong (only usage errors with no option at all print help).
if __name__ == '__main__':
    database_manager = DatabaseManager()
    if len(sys.argv) <= 1:
        # No option supplied: release the connection DatabaseManager()
        # may have opened, show usage and quit.
        database_manager.close()
        print('please enter option.')
        print_option()
        exit(0)
    mode = sys.argv[1]
    if mode == 'init_db':
        print('Start to create database for Database Manager >>>>>>')
        database_manager.init_database()
        print('End to create database for Database Manager >>>>>>')
    elif mode == 'drop_db':
        print('Start to drop database for Database Manager >>>>>>')
        database_manager.drop_all_tables()
        print('Finish to drop database for Database Manager >>>>>>')
    elif mode == 'add_image_mapping':
        if len(sys.argv) == 4:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = sys.argv[3]
            database_manager.add_image_id_mapping(cascading_image_id, cascaded_image_id)
    elif mode == 'add_flavor_mapping':
        if len(sys.argv) == 4:
            cascading_flavor_id = sys.argv[2]
            cascaded_flavor_id = sys.argv[3]
            database_manager.add_flavor_id_mapping(cascading_flavor_id, cascaded_flavor_id)
    elif mode == 'get_cascaded_image':
        if len(sys.argv) == 3:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = database_manager.get_cascaded_image_id(cascading_image_id)
            print('cascaded image id: %s' % cascaded_image_id)
    elif mode == 'get_cascaded_flavor':
        if len(sys.argv) == 3:
            cascading_flavor_id = sys.argv[2]
            cascaded_flavor_id = database_manager.get_cascaded_flavor_id(cascading_flavor_id)
            print('cascaded flavor id: %s' % cascaded_flavor_id)
    elif mode == 'get_cascaded_server':
        if len(sys.argv) == 3:
            cascading_server_id = sys.argv[2]
            cascaded_server_id = database_manager.get_cascaded_server_id(cascading_server_id)
            print('cascaded server id: %s' % cascaded_server_id)
    elif mode == 'add_server_mapping':
        if len(sys.argv) == 4:
            cascading_server_id = sys.argv[2]
            cascaded_server_id = sys.argv[3]
            database_manager.add_server_id_mapping(cascading_server_id, cascaded_server_id)
    elif mode == 'delete_image_mapping':
        if len(sys.argv) == 3:
            cascading_image_id = sys.argv[2]
            # NOTE(review): delete_image_id_mapping appears to return None
            # (it only executes DELETE + commit), so cascaded_image_id is
            # unused here — confirm before relying on it.
            cascaded_image_id = database_manager.delete_image_id_mapping(cascading_image_id)
            print('delete image mapping of cascading image: %s' % cascading_image_id)
    elif mode == '--help':
        print_option()
    else:
        print('Not support option: %s' % mode)
        print_option()
    # Always release the shared sqlite connection before exiting.
    database_manager.close()
| {
"repo_name": "Hybrid-Cloud/orchard",
"path": "code/hwcloud/database_manager.py",
"copies": "2",
"size": "17589",
"license": "apache-2.0",
"hash": -8404873660204930000,
"line_mean": 36.2648305085,
"line_max": 145,
"alpha_frac": 0.6042981409,
"autogenerated": false,
"ratio": 3.519911947168301,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001432999086915872,
"num_lines": 472
} |
__author__ = 'Administrator'
import sqlite3
import sys
import os
import traceback
import time
from hwcloud import LOG
TIMES = 50
INTERVAL = 0.1
def retry(times, interval):
    """Decorator factory: re-invoke the wrapped callable on any exception.

    :param times: maximum number of retries before the last exception
                  is re-raised to the caller.
    :param interval: seconds to sleep between consecutive attempts.

    Fixes over the original: ``except Exception, e`` is Python-2-only
    syntax (a SyntaxError on Python 3) and the inner ``if timer <= times``
    guard was provably always true (``timer`` never exceeds ``times``),
    so it has been removed.
    """
    def _wrapper(f):
        def __wrapper(*args, **kwargs):
            attempt = 0
            while True:
                try:
                    return f(*args, **kwargs)
                except Exception:
                    LOG.debug('Do DB action Exception: %s' % traceback.format_exc())
                    if attempt < times:
                        attempt += 1
                        time.sleep(interval)
                        LOG.error('Start to retry to do db action, TIME: %s' % attempt)
                        continue
                    # Retry budget spent: log and propagate with the
                    # original traceback intact.
                    LOG.error('Do DB Exception: %s' % traceback.format_exc())
                    raise
        return __wrapper
    return _wrapper
def close_connection():
    """Decorator factory: always release the manager's DB connection.

    Whether the wrapped method returns or raises, ``self.close()`` runs
    afterwards, so no sqlite connection is left open between operations.

    The original ``except Exception, e: raise e`` clause was Python-2-only
    syntax and a no-op that mangled the traceback; ``try/finally`` is the
    equivalent, correct form.
    """
    def _wrapper(func):
        def __wrapper(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            finally:
                self.close()
        return __wrapper
    return _wrapper
class DatabaseManager(object):
    """sqlite-backed store of cascading <-> cascaded resource ID mappings.

    A single connection is shared through the class attribute ``conn``;
    ``connect``/``close`` manage its lifecycle, and the module-level
    ``retry``/``close_connection`` decorators retry transient sqlite
    errors and release the connection after every public operation.

    Fixes over the original:
      * ``update_cascaded_backup_in_volume_mapping`` and
        ``update_cascaded_image_in_volume_mapping`` had the SET and WHERE
        columns swapped — they overwrote ``cascading_volume_id`` with the
        backup/image id while matching on ``cascaded_volume_id``.
      * All queries now bind values as parameters instead of
        %%-interpolating them into the SQL text (injection / quoting safe).
    """

    # Shared sqlite3 connection, lazily opened by connect().
    conn = None

    def __init__(self):
        """Set up DB location and SQL statements; create schema on first run."""
        self.HWS_GATEWAY_DB = 'hws_gateway.db'
        self.DB_STORE_PATH = "/home/sqlite_db"
        self.CREATE_TABLE_SERVER_ID_MAPPING = \
            '''CREATE TABLE server_id_mapping(cascading_server_id text, cascaded_server_id text)'''
        self.INSERT_SERVER_ID_MAPPING = \
            '''INSERT INTO server_id_mapping(cascading_server_id, cascaded_server_id) VALUES (?,?)'''
        self.CREATE_TABLE_SERVER_ID_NAME_MAPPING = \
            '''CREATE TABLE server_id_name_mapping(cascading_server_id text, cascaded_server_name text)'''
        self.INSERT_SERVER_ID_NAME_MAPPING = \
            '''INSERT INTO server_id_name_mapping(cascading_server_id, cascaded_server_name) VALUES (?,?)'''
        self.CREATE_TABLE_IMAGE_ID_MAPPING = \
            '''CREATE TABLE image_id_mapping(cascading_image_id text, cascaded_image_id text)'''
        self.INSERT_IMAGE_ID_MAPPING = \
            '''INSERT INTO image_id_mapping(cascading_image_id, cascaded_image_id) VALUES (?,?)'''
        self.CREATE_TABLE_FLAVOR_ID_MAPPING = \
            '''CREATE TABLE flavor_id_mapping(cascading_flavor_id text, cascaded_flavor_id text)'''
        self.INSERT_TABLE_FLAVOR_ID_MAPPING = \
            '''INSERT INTO flavor_id_mapping(cascading_flavor_id, cascaded_flavor_id) VALUES (?,?)'''
        self.CREATE_TABLE_VOLUME_MAPPING = \
            '''CREATE TABLE volume_mapping(cascading_volume_id text, cascaded_volume_id text, cascaded_backup_id text, cascaded_image_id text)'''
        self.INSERT_VOLUME_MAPPING = \
            '''INSERT INTO volume_mapping(cascading_volume_id, cascaded_volume_id, cascaded_backup_id, cascaded_image_id) VALUES (?, ?, ?, ?)'''
        db_full_name = self.get_hws_gateway_db_full_name()
        if not os.path.isfile(db_full_name):
            # First run: the database file does not exist yet.
            self.init_database()

    def get_current_dir(self):
        """Return the directory containing this module."""
        return os.path.split(os.path.realpath(__file__))[0]

    def get_hws_gateway_db_full_name(self):
        """Return the absolute path of the sqlite database file."""
        return os.path.join(self.DB_STORE_PATH, self.HWS_GATEWAY_DB)

    def connect(self):
        """Open (once) and return the shared sqlite connection."""
        if DatabaseManager.conn is None:
            DatabaseManager.conn = sqlite3.connect(self.get_hws_gateway_db_full_name())
        return DatabaseManager.conn

    def close(self):
        """Close the shared connection, if open, and forget it."""
        if DatabaseManager.conn:
            DatabaseManager.conn.close()
            DatabaseManager.conn = None

    def commit(self):
        """Commit the current transaction when a connection is open."""
        if DatabaseManager.conn:
            DatabaseManager.conn.commit()

    def init_database(self):
        """Create every mapping table that does not exist yet."""
        self.create_table_server_id_mapping()
        self.create_table_server_id_name_mapping()
        self.create_table_image_id_mapping()
        self.create_table_flavor_id_mapping()
        self.create_table_volume_mapping()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_server_id_mapping(self, cascading_server_id, cascaded_server_id):
        """Record a cascading -> cascaded server id pair."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_SERVER_ID_MAPPING,
                       (cascading_server_id, cascaded_server_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_server_id_by_cascading_id(self, cascading_server_id):
        """Delete the server id mapping for *cascading_server_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM server_id_mapping WHERE cascading_server_id = ?",
                       (cascading_server_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_server_id(self, cascading_id):
        """Return the cascaded server id for *cascading_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_server_id FROM server_id_mapping "
                       "WHERE cascading_server_id = ?", (cascading_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_server_id_name_mapping(self, cascading_server_id, cascaded_server_name):
        """Record a cascading server id -> cascaded server name pair."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_SERVER_ID_NAME_MAPPING,
                       (cascading_server_id, cascaded_server_name))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_server_name(self, cascading_server_id):
        """Return the cascaded server name for *cascading_server_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_server_name FROM server_id_name_mapping "
                       "WHERE cascading_server_id = ?", (cascading_server_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_server_id_name_by_cascading_id(self, cascading_server_id):
        """Delete the server id/name mapping for *cascading_server_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM server_id_name_mapping WHERE cascading_server_id = ?",
                       (cascading_server_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_image_id_mapping(self, cascading_image_id, cascaded_image_id):
        """Record a cascading -> cascaded image id pair."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_IMAGE_ID_MAPPING,
                       (cascading_image_id, cascaded_image_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_image_id_mapping(self, cascading_image_id):
        """Delete the image id mapping for *cascading_image_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM image_id_mapping WHERE cascading_image_id = ?",
                       (cascading_image_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_image_id(self, cascading_image_id):
        """Return the cascaded image id for *cascading_image_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_image_id FROM image_id_mapping "
                       "WHERE cascading_image_id = ?", (cascading_image_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_flavor_id_mapping(self, cascading_flavor_id, cascaded_flavor_id):
        """Record a cascading -> cascaded flavor id pair."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_TABLE_FLAVOR_ID_MAPPING,
                       (cascading_flavor_id, cascaded_flavor_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_flavor_id_mapping(self, cascading_flavor_id):
        """Delete the flavor id mapping for *cascading_flavor_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM flavor_id_mapping WHERE cascading_flavor_id = ?",
                       (cascading_flavor_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_flavor_id(self, cascading_flavor_id):
        """Return the cascaded flavor id for *cascading_flavor_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_flavor_id FROM flavor_id_mapping "
                       "WHERE cascading_flavor_id = ?", (cascading_flavor_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_volume_mapping(self, cascading_volume_id, cascaded_volume_id,
                           cascaded_backup_id=None, cascaded_image_id=None):
        """Record a volume mapping row; backup and image ids are optional."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_VOLUME_MAPPING,
                       (cascading_volume_id, cascaded_volume_id,
                        cascaded_backup_id, cascaded_image_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_volume_id(self, cascading_volume_id):
        """Return the cascaded volume id for *cascading_volume_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_volume_id FROM volume_mapping "
                       "WHERE cascading_volume_id = ?", (cascading_volume_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_backup_in_volume_mapping(self, cascading_volume_id, cascaded_backup_id):
        """Set the cascaded backup id on the row for *cascading_volume_id*.

        BUG FIX: the original statement wrote *cascaded_backup_id* into
        the cascading_volume_id column and matched on cascaded_volume_id.
        """
        cursor = self.connect().cursor()
        cursor.execute("UPDATE volume_mapping SET cascaded_backup_id=? "
                       "WHERE cascading_volume_id=?",
                       (cascaded_backup_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_image_in_volume_mapping(self, cascading_volume_id, cascaded_image_id):
        """Set the cascaded image id on the row for *cascading_volume_id*.

        BUG FIX: the original statement wrote *cascaded_image_id* into
        the cascading_volume_id column and matched on cascaded_volume_id.
        """
        cursor = self.connect().cursor()
        cursor.execute("UPDATE volume_mapping SET cascaded_image_id=? "
                       "WHERE cascading_volume_id=?",
                       (cascaded_image_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_volume_in_volume_mapping(self, cascading_volume_id, cascaded_volume_id):
        """Set the cascaded volume id on the row for *cascading_volume_id*."""
        cursor = self.connect().cursor()
        cursor.execute("UPDATE volume_mapping SET cascaded_volume_id=? "
                       "WHERE cascading_volume_id=?",
                       (cascaded_volume_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_backup(self, cascading_volume_id):
        """Return the cascaded backup id for *cascading_volume_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_backup_id FROM volume_mapping "
                       "WHERE cascading_volume_id = ?", (cascading_volume_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_backup_image(self, cascading_volume_id):
        """Return the cascaded image id for *cascading_volume_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_image_id FROM volume_mapping "
                       "WHERE cascading_volume_id = ?", (cascading_volume_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_volume_mapping(self, cascading_volume_id):
        """Delete the volume mapping row for *cascading_volume_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM volume_mapping WHERE cascading_volume_id = ?",
                       (cascading_volume_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def drop_table(self, table_name):
        """Drop *table_name* if it exists.

        Identifiers cannot be bound as SQL parameters; the value comes
        only from the fixed drop_table_* wrappers below, never from
        user input.
        """
        cursor = self.connect().cursor()
        cursor.execute('drop table if exists %s' % table_name)
        self.commit()

    def create_tables(self, create_table_sql_list):
        """Execute each CREATE TABLE statement in *create_table_sql_list*."""
        cursor = self.connect().cursor()
        for create_table_sql in create_table_sql_list:
            cursor.execute(create_table_sql)
        self.commit()

    def drop_all_tables(self):
        """Drop every mapping table managed by this class."""
        self.drop_table_server_id_mapping()
        self.drop_table_server_id_name_mapping()
        self.drop_table_image_id_mapping()
        self.drop_table_flavor_id_mapping()
        self.drop_table_volume_mapping()

    def create_table(self, create_table_sql):
        """Create one table from its CREATE TABLE statement."""
        self.create_tables([create_table_sql])

    # Fixed-name wrappers so callers never pass raw table names.
    def drop_table_server_id_mapping(self):
        self.drop_table('server_id_mapping')

    def drop_table_server_id_name_mapping(self):
        self.drop_table('server_id_name_mapping')

    def drop_table_image_id_mapping(self):
        self.drop_table('image_id_mapping')

    def drop_table_flavor_id_mapping(self):
        self.drop_table('flavor_id_mapping')

    def drop_table_volume_mapping(self):
        self.drop_table('volume_mapping')

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_image_id_mapping(self):
        """Create image_id_mapping unless it already exists."""
        if not self.is_table_exist('image_id_mapping'):
            self.create_table(self.CREATE_TABLE_IMAGE_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_server_id_mapping(self):
        """Create server_id_mapping unless it already exists."""
        if not self.is_table_exist('server_id_mapping'):
            self.create_table(self.CREATE_TABLE_SERVER_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_server_id_name_mapping(self):
        """Create server_id_name_mapping unless it already exists."""
        if not self.is_table_exist('server_id_name_mapping'):
            self.create_table(self.CREATE_TABLE_SERVER_ID_NAME_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_flavor_id_mapping(self):
        """Create flavor_id_mapping unless it already exists."""
        if not self.is_table_exist('flavor_id_mapping'):
            self.create_table(self.CREATE_TABLE_FLAVOR_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_volume_mapping(self):
        """Create volume_mapping unless it already exists."""
        if not self.is_table_exist('volume_mapping'):
            self.create_table(self.CREATE_TABLE_VOLUME_MAPPING)

    def is_table_exist(self, table_name):
        """Return True if *table_name* exists in the sqlite schema."""
        cursor = self.connect().cursor()
        sql = "SELECT name FROM sqlite_master WHERE type='table' AND name=?"
        cursor.execute(sql, (table_name,))
        return cursor.fetchone() is not None
def print_option():
    """Print CLI usage.

    Fixed to list every option the ``__main__`` dispatcher actually
    supports: the original help omitted ``init_db``, ``drop_db``,
    ``delete_image_mapping`` and ``--help``.
    """
    print('Support options:')
    print('    init_db')
    print('    drop_db')
    print('    add_image_mapping [CASCADING_IMAGE_ID] [CASCADED_IMAGE_ID]')
    print('    add_flavor_mapping [CASCADING_FLAVOR_ID] [CASCADED_FLAVOR_ID]')
    print('    add_server_mapping [CASCADING_SERVER_ID] [CASCADED_SERVER_ID]')
    print('    delete_image_mapping [CASCADING_IMAGE_ID]')
    print('    get_cascaded_image [CASCADING_IMAGE_ID]')
    print('    get_cascaded_flavor [CASCADING_FLAVOR_ID]')
    print('    get_cascaded_server [CASCADING_SERVER_ID]')
    print('    --help')
# Command-line entry point: dispatch on the first positional argument.
# Commands that take arguments silently do nothing when the argument
# count is wrong (only usage errors with no option at all print help).
if __name__ == '__main__':
    database_manager = DatabaseManager()
    if len(sys.argv) <= 1:
        # No option supplied: release the connection DatabaseManager()
        # may have opened, show usage and quit.
        database_manager.close()
        print('please enter option.')
        print_option()
        exit(0)
    mode = sys.argv[1]
    if mode == 'init_db':
        print('Start to create database for Database Manager >>>>>>')
        database_manager.init_database()
        print('End to create database for Database Manager >>>>>>')
    elif mode == 'drop_db':
        print('Start to drop database for Database Manager >>>>>>')
        database_manager.drop_all_tables()
        print('Finish to drop database for Database Manager >>>>>>')
    elif mode == 'add_image_mapping':
        if len(sys.argv) == 4:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = sys.argv[3]
            database_manager.add_image_id_mapping(cascading_image_id, cascaded_image_id)
    elif mode == 'add_flavor_mapping':
        if len(sys.argv) == 4:
            cascading_flavor_id = sys.argv[2]
            cascaded_flavor_id = sys.argv[3]
            database_manager.add_flavor_id_mapping(cascading_flavor_id, cascaded_flavor_id)
    elif mode == 'get_cascaded_image':
        if len(sys.argv) == 3:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = database_manager.get_cascaded_image_id(cascading_image_id)
            print('cascaded image id: %s' % cascaded_image_id)
    elif mode == 'get_cascaded_flavor':
        if len(sys.argv) == 3:
            cascading_flavor_id = sys.argv[2]
            cascaded_flavor_id = database_manager.get_cascaded_flavor_id(cascading_flavor_id)
            print('cascaded flavor id: %s' % cascaded_flavor_id)
    elif mode == 'get_cascaded_server':
        if len(sys.argv) == 3:
            cascading_server_id = sys.argv[2]
            cascaded_server_id = database_manager.get_cascaded_server_id(cascading_server_id)
            print('cascaded server id: %s' % cascaded_server_id)
    elif mode == 'add_server_mapping':
        if len(sys.argv) == 4:
            cascading_server_id = sys.argv[2]
            cascaded_server_id = sys.argv[3]
            database_manager.add_server_id_mapping(cascading_server_id, cascaded_server_id)
    elif mode == 'delete_image_mapping':
        if len(sys.argv) == 3:
            cascading_image_id = sys.argv[2]
            # NOTE(review): delete_image_id_mapping appears to return None
            # (it only executes DELETE + commit), so cascaded_image_id is
            # unused here — confirm before relying on it.
            cascaded_image_id = database_manager.delete_image_id_mapping(cascading_image_id)
            print('delete image mapping of cascading image: %s' % cascading_image_id)
    elif mode == '--help':
        print_option()
    else:
        print('Not support option: %s' % mode)
        print_option()
    database_manager.close()
"repo_name": "nash-x/hws",
"path": "hwcloud/database_manager.py",
"copies": "1",
"size": "17502",
"license": "apache-2.0",
"hash": -3169718230779795000,
"line_mean": 36.3995726496,
"line_max": 145,
"alpha_frac": 0.6036452977,
"autogenerated": false,
"ratio": 3.5186972255729794,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9615162076106969,
"avg_score": 0.001436089433202088,
"num_lines": 468
} |
__author__ = 'Administrator'
import sqlite3
import sys
import os
import traceback
import time
from log import LOG
TIMES = 50
INTERVAL = 0.1
def retry(times, interval):
    """Decorator factory: re-invoke the wrapped callable on any exception.

    :param times: maximum number of retries before the last exception
                  is re-raised to the caller.
    :param interval: seconds to sleep between consecutive attempts.

    Fixes over the original: ``except Exception, e`` is Python-2-only
    syntax (a SyntaxError on Python 3) and the inner ``if timer <= times``
    guard was provably always true (``timer`` never exceeds ``times``),
    so it has been removed.
    """
    def _wrapper(f):
        def __wrapper(*args, **kwargs):
            attempt = 0
            while True:
                try:
                    return f(*args, **kwargs)
                except Exception:
                    LOG.debug('Do DB action Exception: %s' % traceback.format_exc())
                    if attempt < times:
                        attempt += 1
                        time.sleep(interval)
                        LOG.error('Start to retry to do db action, TIME: %s' % attempt)
                        continue
                    # Retry budget spent: log and propagate with the
                    # original traceback intact.
                    LOG.error('Do DB Exception: %s' % traceback.format_exc())
                    raise
        return __wrapper
    return _wrapper
def close_connection():
    """Decorator factory: always release the manager's DB connection.

    Whether the wrapped method returns or raises, ``self.close()`` runs
    afterwards, so no sqlite connection is left open between operations.

    The original ``except Exception, e: raise e`` clause was Python-2-only
    syntax and a no-op that mangled the traceback; ``try/finally`` is the
    equivalent, correct form.
    """
    def _wrapper(func):
        def __wrapper(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            finally:
                self.close()
        return __wrapper
    return _wrapper
class DatabaseManager(object):
    """sqlite-backed store of cascading <-> cascaded resource ID mappings.

    A single connection is shared through the class attribute ``conn``;
    ``connect``/``close`` manage its lifecycle, and the module-level
    ``retry``/``close_connection`` decorators retry transient sqlite
    errors and release the connection after every public operation.

    Fixes over the original:
      * ``update_cascaded_backup_in_volume_mapping`` and
        ``update_cascaded_image_in_volume_mapping`` had the SET and WHERE
        columns swapped — they overwrote ``cascading_volume_id`` with the
        backup/image id while matching on ``cascaded_volume_id``.
      * All queries now bind values as parameters instead of
        %%-interpolating them into the SQL text (injection / quoting safe).
    """

    # Shared sqlite3 connection, lazily opened by connect().
    conn = None

    def __init__(self):
        """Set up DB location and SQL statements; create schema on first run."""
        self.HWS_GATEWAY_DB = 'hws_gateway.db'
        self.DB_STORE_PATH = "/home/sqlite_db"
        self.CREATE_TABLE_SERVER_ID_MAPPING = \
            '''CREATE TABLE server_id_mapping(cascading_server_id text, cascaded_server_id text)'''
        self.INSERT_SERVER_ID_MAPPING = \
            '''INSERT INTO server_id_mapping(cascading_server_id, cascaded_server_id) VALUES (?,?)'''
        self.CREATE_TABLE_SERVER_ID_NAME_MAPPING = \
            '''CREATE TABLE server_id_name_mapping(cascading_server_id text, cascaded_server_name text)'''
        self.INSERT_SERVER_ID_NAME_MAPPING = \
            '''INSERT INTO server_id_name_mapping(cascading_server_id, cascaded_server_name) VALUES (?,?)'''
        self.CREATE_TABLE_IMAGE_ID_MAPPING = \
            '''CREATE TABLE image_id_mapping(cascading_image_id text, cascaded_image_id text)'''
        self.INSERT_IMAGE_ID_MAPPING = \
            '''INSERT INTO image_id_mapping(cascading_image_id, cascaded_image_id) VALUES (?,?)'''
        self.CREATE_TABLE_FLAVOR_ID_MAPPING = \
            '''CREATE TABLE flavor_id_mapping(cascading_flavor_id text, cascaded_flavor_id text)'''
        self.INSERT_TABLE_FLAVOR_ID_MAPPING = \
            '''INSERT INTO flavor_id_mapping(cascading_flavor_id, cascaded_flavor_id) VALUES (?,?)'''
        self.CREATE_TABLE_VOLUME_MAPPING = \
            '''CREATE TABLE volume_mapping(cascading_volume_id text, cascaded_volume_id text, cascaded_backup_id text, cascaded_image_id text)'''
        self.INSERT_VOLUME_MAPPING = \
            '''INSERT INTO volume_mapping(cascading_volume_id, cascaded_volume_id, cascaded_backup_id, cascaded_image_id) VALUES (?, ?, ?, ?)'''
        db_full_name = self.get_hws_gateway_db_full_name()
        if not os.path.isfile(db_full_name):
            # First run: the database file does not exist yet.
            self.init_database()

    def get_current_dir(self):
        """Return the directory containing this module."""
        return os.path.split(os.path.realpath(__file__))[0]

    def get_hws_gateway_db_full_name(self):
        """Return the absolute path of the sqlite database file."""
        return os.path.join(self.DB_STORE_PATH, self.HWS_GATEWAY_DB)

    def connect(self):
        """Open (once) and return the shared sqlite connection."""
        if DatabaseManager.conn is None:
            DatabaseManager.conn = sqlite3.connect(self.get_hws_gateway_db_full_name())
        return DatabaseManager.conn

    def close(self):
        """Close the shared connection, if open, and forget it."""
        if DatabaseManager.conn:
            DatabaseManager.conn.close()
            DatabaseManager.conn = None

    def commit(self):
        """Commit the current transaction when a connection is open."""
        if DatabaseManager.conn:
            DatabaseManager.conn.commit()

    def init_database(self):
        """Create every mapping table that does not exist yet."""
        self.create_table_server_id_mapping()
        self.create_table_server_id_name_mapping()
        self.create_table_image_id_mapping()
        self.create_table_flavor_id_mapping()
        self.create_table_volume_mapping()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_server_id_mapping(self, cascading_server_id, cascaded_server_id):
        """Record a cascading -> cascaded server id pair."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_SERVER_ID_MAPPING,
                       (cascading_server_id, cascaded_server_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_server_id_by_cascading_id(self, cascading_server_id):
        """Delete the server id mapping for *cascading_server_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM server_id_mapping WHERE cascading_server_id = ?",
                       (cascading_server_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_server_id(self, cascading_id):
        """Return the cascaded server id for *cascading_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_server_id FROM server_id_mapping "
                       "WHERE cascading_server_id = ?", (cascading_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_server_id_name_mapping(self, cascading_server_id, cascaded_server_name):
        """Record a cascading server id -> cascaded server name pair."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_SERVER_ID_NAME_MAPPING,
                       (cascading_server_id, cascaded_server_name))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_server_name(self, cascading_server_id):
        """Return the cascaded server name for *cascading_server_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_server_name FROM server_id_name_mapping "
                       "WHERE cascading_server_id = ?", (cascading_server_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_server_id_name_by_cascading_id(self, cascading_server_id):
        """Delete the server id/name mapping for *cascading_server_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM server_id_name_mapping WHERE cascading_server_id = ?",
                       (cascading_server_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_image_id_mapping(self, cascading_image_id, cascaded_image_id):
        """Record a cascading -> cascaded image id pair."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_IMAGE_ID_MAPPING,
                       (cascading_image_id, cascaded_image_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_image_id_mapping(self, cascading_image_id):
        """Delete the image id mapping for *cascading_image_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM image_id_mapping WHERE cascading_image_id = ?",
                       (cascading_image_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_image_id(self, cascading_image_id):
        """Return the cascaded image id for *cascading_image_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_image_id FROM image_id_mapping "
                       "WHERE cascading_image_id = ?", (cascading_image_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_flavor_id_mapping(self, cascading_flavor_id, cascaded_flavor_id):
        """Record a cascading -> cascaded flavor id pair."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_TABLE_FLAVOR_ID_MAPPING,
                       (cascading_flavor_id, cascaded_flavor_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_flavor_id_mapping(self, cascading_flavor_id):
        """Delete the flavor id mapping for *cascading_flavor_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM flavor_id_mapping WHERE cascading_flavor_id = ?",
                       (cascading_flavor_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_flavor_id(self, cascading_flavor_id):
        """Return the cascaded flavor id for *cascading_flavor_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_flavor_id FROM flavor_id_mapping "
                       "WHERE cascading_flavor_id = ?", (cascading_flavor_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_volume_mapping(self, cascading_volume_id, cascaded_volume_id,
                           cascaded_backup_id=None, cascaded_image_id=None):
        """Record a volume mapping row; backup and image ids are optional."""
        cursor = self.connect().cursor()
        cursor.execute(self.INSERT_VOLUME_MAPPING,
                       (cascading_volume_id, cascaded_volume_id,
                        cascaded_backup_id, cascaded_image_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_volume_id(self, cascading_volume_id):
        """Return the cascaded volume id for *cascading_volume_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_volume_id FROM volume_mapping "
                       "WHERE cascading_volume_id = ?", (cascading_volume_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_backup_in_volume_mapping(self, cascading_volume_id, cascaded_backup_id):
        """Set the cascaded backup id on the row for *cascading_volume_id*.

        BUG FIX: the original statement wrote *cascaded_backup_id* into
        the cascading_volume_id column and matched on cascaded_volume_id.
        """
        cursor = self.connect().cursor()
        cursor.execute("UPDATE volume_mapping SET cascaded_backup_id=? "
                       "WHERE cascading_volume_id=?",
                       (cascaded_backup_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_image_in_volume_mapping(self, cascading_volume_id, cascaded_image_id):
        """Set the cascaded image id on the row for *cascading_volume_id*.

        BUG FIX: the original statement wrote *cascaded_image_id* into
        the cascading_volume_id column and matched on cascaded_volume_id.
        """
        cursor = self.connect().cursor()
        cursor.execute("UPDATE volume_mapping SET cascaded_image_id=? "
                       "WHERE cascading_volume_id=?",
                       (cascaded_image_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_volume_in_volume_mapping(self, cascading_volume_id, cascaded_volume_id):
        """Set the cascaded volume id on the row for *cascading_volume_id*."""
        cursor = self.connect().cursor()
        cursor.execute("UPDATE volume_mapping SET cascaded_volume_id=? "
                       "WHERE cascading_volume_id=?",
                       (cascaded_volume_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_backup(self, cascading_volume_id):
        """Return the cascaded backup id for *cascading_volume_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_backup_id FROM volume_mapping "
                       "WHERE cascading_volume_id = ?", (cascading_volume_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_backup_image(self, cascading_volume_id):
        """Return the cascaded image id for *cascading_volume_id*, or None."""
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_image_id FROM volume_mapping "
                       "WHERE cascading_volume_id = ?", (cascading_volume_id,))
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_volume_mapping(self, cascading_volume_id):
        """Delete the volume mapping row for *cascading_volume_id*."""
        cursor = self.connect().cursor()
        cursor.execute("DELETE FROM volume_mapping WHERE cascading_volume_id = ?",
                       (cascading_volume_id,))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def drop_table(self, table_name):
        """Drop *table_name* if it exists.

        Identifiers cannot be bound as SQL parameters; the value comes
        only from the fixed drop_table_* wrappers below, never from
        user input.
        """
        cursor = self.connect().cursor()
        cursor.execute('drop table if exists %s' % table_name)
        self.commit()

    def create_tables(self, create_table_sql_list):
        """Execute each CREATE TABLE statement in *create_table_sql_list*."""
        cursor = self.connect().cursor()
        for create_table_sql in create_table_sql_list:
            cursor.execute(create_table_sql)
        self.commit()

    def drop_all_tables(self):
        """Drop every mapping table managed by this class."""
        self.drop_table_server_id_mapping()
        self.drop_table_server_id_name_mapping()
        self.drop_table_image_id_mapping()
        self.drop_table_flavor_id_mapping()
        self.drop_table_volume_mapping()

    def create_table(self, create_table_sql):
        """Create one table from its CREATE TABLE statement."""
        self.create_tables([create_table_sql])

    # Fixed-name wrappers so callers never pass raw table names.
    def drop_table_server_id_mapping(self):
        self.drop_table('server_id_mapping')

    def drop_table_server_id_name_mapping(self):
        self.drop_table('server_id_name_mapping')

    def drop_table_image_id_mapping(self):
        self.drop_table('image_id_mapping')

    def drop_table_flavor_id_mapping(self):
        self.drop_table('flavor_id_mapping')

    def drop_table_volume_mapping(self):
        self.drop_table('volume_mapping')

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_image_id_mapping(self):
        """Create image_id_mapping unless it already exists."""
        if not self.is_table_exist('image_id_mapping'):
            self.create_table(self.CREATE_TABLE_IMAGE_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_server_id_mapping(self):
        """Create server_id_mapping unless it already exists."""
        if not self.is_table_exist('server_id_mapping'):
            self.create_table(self.CREATE_TABLE_SERVER_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_server_id_name_mapping(self):
        """Create server_id_name_mapping unless it already exists."""
        if not self.is_table_exist('server_id_name_mapping'):
            self.create_table(self.CREATE_TABLE_SERVER_ID_NAME_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_flavor_id_mapping(self):
        """Create flavor_id_mapping unless it already exists."""
        if not self.is_table_exist('flavor_id_mapping'):
            self.create_table(self.CREATE_TABLE_FLAVOR_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_volume_mapping(self):
        """Create volume_mapping unless it already exists."""
        if not self.is_table_exist('volume_mapping'):
            self.create_table(self.CREATE_TABLE_VOLUME_MAPPING)

    def is_table_exist(self, table_name):
        """Return True if *table_name* exists in the sqlite schema."""
        cursor = self.connect().cursor()
        sql = "SELECT name FROM sqlite_master WHERE type='table' AND name=?"
        cursor.execute(sql, (table_name,))
        return cursor.fetchone() is not None
def print_option():
    """Print CLI usage.

    Fixed to list every option the ``__main__`` dispatcher actually
    supports: the original help omitted ``init_db``, ``drop_db``,
    ``delete_image_mapping`` and ``--help``.
    """
    print('Support options:')
    print('    init_db')
    print('    drop_db')
    print('    add_image_mapping [CASCADING_IMAGE_ID] [CASCADED_IMAGE_ID]')
    print('    add_flavor_mapping [CASCADING_FLAVOR_ID] [CASCADED_FLAVOR_ID]')
    print('    add_server_mapping [CASCADING_SERVER_ID] [CASCADED_SERVER_ID]')
    print('    delete_image_mapping [CASCADING_IMAGE_ID]')
    print('    get_cascaded_image [CASCADING_IMAGE_ID]')
    print('    get_cascaded_flavor [CASCADING_FLAVOR_ID]')
    print('    get_cascaded_server [CASCADING_SERVER_ID]')
    print('    --help')
# Command-line entry point: dispatch on the first positional argument.
# Commands that take arguments silently do nothing when the argument
# count is wrong (only usage errors with no option at all print help).
if __name__ == '__main__':
    database_manager = DatabaseManager()
    if len(sys.argv) <= 1:
        # No option supplied: release the connection DatabaseManager()
        # may have opened, show usage and quit.
        database_manager.close()
        print('please enter option.')
        print_option()
        exit(0)
    mode = sys.argv[1]
    if mode == 'init_db':
        print('Start to create database for Database Manager >>>>>>')
        database_manager.init_database()
        print('End to create database for Database Manager >>>>>>')
    elif mode == 'drop_db':
        print('Start to drop database for Database Manager >>>>>>')
        database_manager.drop_all_tables()
        print('Finish to drop database for Database Manager >>>>>>')
    elif mode == 'add_image_mapping':
        if len(sys.argv) == 4:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = sys.argv[3]
            database_manager.add_image_id_mapping(cascading_image_id, cascaded_image_id)
    elif mode == 'add_flavor_mapping':
        if len(sys.argv) == 4:
            cascading_flavor_id = sys.argv[2]
            cascaded_flavor_id = sys.argv[3]
            database_manager.add_flavor_id_mapping(cascading_flavor_id, cascaded_flavor_id)
    elif mode == 'get_cascaded_image':
        if len(sys.argv) == 3:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = database_manager.get_cascaded_image_id(cascading_image_id)
            print('cascaded image id: %s' % cascaded_image_id)
    elif mode == 'get_cascaded_flavor':
        if len(sys.argv) == 3:
            cascading_flavor_id = sys.argv[2]
            cascaded_flavor_id = database_manager.get_cascaded_flavor_id(cascading_flavor_id)
            print('cascaded flavor id: %s' % cascaded_flavor_id)
    elif mode == 'get_cascaded_server':
        if len(sys.argv) == 3:
            cascading_server_id = sys.argv[2]
            cascaded_server_id = database_manager.get_cascaded_server_id(cascading_server_id)
            print('cascaded server id: %s' % cascaded_server_id)
    elif mode == 'add_server_mapping':
        if len(sys.argv) == 4:
            cascading_server_id = sys.argv[2]
            cascaded_server_id = sys.argv[3]
            database_manager.add_server_id_mapping(cascading_server_id, cascaded_server_id)
    elif mode == 'delete_image_mapping':
        if len(sys.argv) == 3:
            cascading_image_id = sys.argv[2]
            # NOTE(review): delete_image_id_mapping appears to return None
            # (it only executes DELETE + commit), so cascaded_image_id is
            # unused here — confirm before relying on it.
            cascaded_image_id = database_manager.delete_image_id_mapping(cascading_image_id)
            print('delete image mapping of cascading image: %s' % cascading_image_id)
    elif mode == '--help':
        print_option()
    else:
        print('Not support option: %s' % mode)
        print_option()
    database_manager.close()
"repo_name": "hgqislub/hybird-orchard",
"path": "etc/hybrid_cloud/scripts/patches/patches_tool/hws_patch/database_manager.py",
"copies": "1",
"size": "17498",
"license": "apache-2.0",
"hash": 910259930782546600,
"line_mean": 36.391025641,
"line_max": 145,
"alpha_frac": 0.603554692,
"autogenerated": false,
"ratio": 3.5186004423889,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.961497468722289,
"avg_score": 0.001436089433202088,
"num_lines": 468
} |
__author__ = 'Administrator'
import time
import traceback
from functools import wraps
from wormholeclient.client import Client
from wormholeclient import constants as wormhole_constants
from nova.virt.hws.exception_ex import RetryException
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class RetryDecorator(object):
    """Decorator that retries the wrapped function on selected exceptions.

    The decorated function is re-invoked every time one of ``exceptions``
    is raised, sleeping between attempts; the sleep time grows by
    ``inc_sleep_time`` per attempt and is capped at ``max_sleep_time``.
    With ``max_retry_count == -1`` the function is retried indefinitely.
    Exceptions not listed in ``exceptions`` propagate immediately.
    """
    def __init__(self, max_retry_count=-1, inc_sleep_time=5,
                 max_sleep_time=60, exceptions=()):
        """Configure the retry object using the input params.
        :param max_retry_count: maximum number of attempts for the wrapped
                                function when one of ``exceptions`` is
                                caught; -1 means retry forever.
        :param inc_sleep_time: incremental time in seconds added to the
                               sleep time after each failed attempt.
        :param max_sleep_time: max sleep time in seconds beyond which the
                               sleep time will not be incremented.
        :param exceptions: exception class, or tuple of classes, for which
                           the function must be retried.
        """
        self._max_retry_count = max_retry_count
        self._inc_sleep_time = inc_sleep_time
        self._max_sleep_time = max_sleep_time
        self._exceptions = exceptions
        # Kept for interface compatibility; not used by f_retry below.
        self._retry_count = 0
        self._sleep_time = 0
    def __call__(self, f):
        @wraps(f)
        def f_retry(*args, **kwargs):
            retries_left = self._max_retry_count
            delay = self._inc_sleep_time
            # BUG FIX: the original condition ("max_retries > 1" only) made
            # the default max_retry_count=-1 skip both the loop and the
            # final attempt, silently returning None without calling f.
            while retries_left > 1 or retries_left == -1:
                try:
                    return f(*args, **kwargs)
                except self._exceptions:
                    # BUG FIX: format_exc() takes a line-limit int, not an
                    # exception instance; use the implicit current exception.
                    LOG.error('retry times: %s, exception: %s' %
                              (str(self._max_retry_count - retries_left),
                               traceback.format_exc()))
                    time.sleep(delay)
                    if retries_left != -1:
                        retries_left -= 1
                    # BUG FIX: the sleep time was documented as incremental
                    # but never actually increased; grow it, capped.
                    delay = min(delay + self._inc_sleep_time,
                                self._max_sleep_time)
            # Final attempt: let any exception propagate to the caller.
            msg = 'func: %s, retry times: %s, failed' % (
                f.__name__, str(self._max_retry_count))
            LOG.error(msg)
            return f(*args, **kwargs)
        return f_retry
class WormHoleBusiness(object):
    """Facade over a list of wormhole REST clients.

    Subclasses provide get_clients(); every public method forwards to the
    first client that can execute the named function, and the whole sweep
    is retried via RetryDecorator until a client answers.
    """
    def __init__(self, wormhole_service_port):
        self.wormhole_service_port = wormhole_service_port
        self.clients = self.get_clients()
    def get_clients(self):
        """Return the list of wormhole clients (subclass responsibility)."""
        raise NotImplementedError()
    def get_version(self):
        """Return the docker version reported by the wormhole service."""
        return self._run_function_of_clients('get_version')
    def restart_container(self, network_info, block_device_info):
        # NOTE: this method used to be defined twice with identical bodies;
        # the duplicate definition was removed.
        return self._run_function_of_clients('restart_container',
                                             network_info=network_info,
                                             block_device_info=block_device_info)
    def start_container(self, network_info, block_device_info):
        return self._run_function_of_clients('start_container',
                                             network_info=network_info,
                                             block_device_info=block_device_info)
    def stop_container(self):
        return self._run_function_of_clients('stop_container')
    def create_container(self, name, image_uuid, injected_files, admin_password, network_info,
                         block_device_info):
        return self._run_function_of_clients('create_container', image_name=name,
                                             image_id=image_uuid, root_volume_id=None,
                                             network_info=network_info,
                                             block_device_info=block_device_info,
                                             inject_files=injected_files,
                                             admin_password=admin_password,
                                             timeout=10)
    def inject_file(self, dst_path, src_path=None, file_data=None, timeout=10):
        return self._run_function_of_clients('inject_file', dst_path=dst_path,
                                             src_path=src_path,
                                             file_data=file_data, timeout=timeout)
    def list_volume(self):
        return self._run_function_of_clients('list_volume')
    def attach_volume(self, volume_id, device, mount_device, timeout=10):
        return self._run_function_of_clients('attach_volume', volume_id=volume_id,
                                             device=device, mount_device=mount_device,
                                             timeout=timeout)
    def create_image(self, image_name, image_id, timeout=10):
        return self._run_function_of_clients('create_image', image_name=image_name,
                                             image_id=image_id, timeout=timeout)
    def image_info(self, image_name, image_id):
        return self._run_function_of_clients('image_info', image_name=image_name,
                                             image_id=image_id)
    def query_task(self, task, timeout=10):
        return self._run_function_of_clients('query_task', task=task, timeout=timeout)
    # BUG FIX: exceptions must be a tuple; (RetryException) is just the class
    # (worked by accident with except, but was not the intended form).
    @RetryDecorator(max_retry_count=50, inc_sleep_time=5, max_sleep_time=60,
                    exceptions=(RetryException,))
    def _run_function_of_clients(self, function_name, *args, **kwargs):
        """Try ``function_name`` on each client until one succeeds.

        Raises RetryException (caught by the decorator, which re-runs the
        whole sweep) when every client failed or returned a falsy result.
        """
        result = None
        tmp_except = Exception('tmp exception when doing function: %s' % function_name)
        for client in self.clients:
            # BUG FIX: getattr without a default raised AttributeError before
            # the intended "no such function" error could ever fire.
            func = getattr(client, function_name, None)
            if func is None:
                raise Exception('There is not such function >%s< in wormhole '
                                'client.' % function_name)
            try:
                result = func(*args, **kwargs)
                LOG.debug('Finish to execute %s' % function_name)
                break
            except Exception as e:  # py2/py3-compatible form of "except X, e"
                tmp_except = e
                continue
        if not result:
            # BUG FIX: traceback.format_exc(tmp_except) passed an exception
            # where a line-limit int is expected; log the exception itself.
            LOG.debug('exception is: %s' % tmp_except)
            raise RetryException(error_info=str(tmp_except))
        return result
    @RetryDecorator(max_retry_count=50, inc_sleep_time=5, max_sleep_time=60,
                    exceptions=(RetryException,))
    def wait_for_task_finish(self, task):
        """Poll ``task`` until SUCCESS; raise on ERROR/unknown states.

        DOING raises RetryException so the decorator polls again.
        """
        task_finish = False
        if task['code'] == wormhole_constants.TASK_SUCCESS:
            return True
        current_task = self.query_task(task)
        task_code = current_task['code']
        if wormhole_constants.TASK_DOING == task_code:
            LOG.debug('task is DOING, status: %s' % task_code)
            raise RetryException(error_info='task status is: %s' % task_code)
        elif wormhole_constants.TASK_ERROR == task_code:
            LOG.debug('task is ERROR, status: %s' % task_code)
            raise Exception('task error, task status is: %s' % task_code)
        elif wormhole_constants.TASK_SUCCESS == task_code:
            LOG.debug('task is SUCCESS, status: %s' % task_code)
            task_finish = True
        else:
            raise Exception('UNKNOW ERROR, task status: %s' % task_code)
        LOG.debug('task: %s is finished' % task)
        return task_finish
class WormHoleBusinessAWS(WormHoleBusiness):
    """WormHoleBusiness for AWS nodes (IPs resolved via the AWS adapter)."""
    def __init__(self, provider_node, aws_adapter, wormhole_service_port):
        """Store the node/adapter; the base __init__ builds the clients.

        :param provider_node: libcloud-style Node object.
        :param aws_adapter: adapter exposing ex_list_network_interfaces().
        :param wormhole_service_port: TCP port of the wormhole agent.
        """
        self.provider_node = provider_node
        self.adapter = aws_adapter
        super(WormHoleBusinessAWS, self).__init__(wormhole_service_port)
    def get_clients(self):
        """Return one wormhole Client per private IP of the node."""
        LOG.debug('Start to get clients.')
        ips = self._get_node_private_ips(self.provider_node)
        LOG.debug('private ips: %s' % ips)
        return self._get_hybrid_service_client(ips, self.wormhole_service_port)
    def _get_node_private_ips(self, provider_node):
        """Return the private IPs of the node's network interfaces.

        :param provider_node: type Node
        :return: list of private IP strings (possibly empty)
        """
        LOG.debug('start to get node private ips for node:%s' % provider_node.name)
        private_ips = []
        interfaces = self.adapter.ex_list_network_interfaces(node=provider_node)
        # Flattened the original "if len(...) > 0 ... else: continue"
        # pyramids; "or []" also guards a missing/None 'private_ips' key,
        # which previously crashed len().
        for interface in interfaces:
            for private_ip_dic in interface.extra.get('private_ips') or []:
                private_ip = private_ip_dic.get('private_ip')
                if private_ip:
                    private_ips.append(private_ip)
        LOG.debug('end to get node private ips, private_ips: %s' % private_ips)
        return private_ips
    def _get_hybrid_service_clients_by_node(self, provider_node):
        """Like get_clients() but for an arbitrary node."""
        port = self.wormhole_service_port
        private_ips = self._get_node_private_ips(provider_node)
        LOG.debug('port: %s' % port)
        LOG.debug('private ips: %s' % private_ips)
        return self._get_hybrid_service_client(private_ips, port)
    def _get_hybrid_service_client(self, ips, port):
        """Build a wormhole Client for every IP in ``ips``."""
        return [Client(ip, port) for ip in ips]
class WormHoleBusinessHWS(WormHoleBusiness):
    """WormHoleBusiness for HWS (Huawei cloud) nodes.

    Unlike the AWS variant, ``provider_node`` here is a plain dict (the
    raw HWS "show server" REST response) rather than a libcloud Node.
    """
    def __init__(self, provider_node, aws_adapter, wormhole_service_port):
        """Build clients for every address of the HWS server.

        :param provider_node: dict response from the HWS API; the only
            fields read here live under ['body']['server']['addresses']::

                {'body': {'server': {
                    'addresses': {
                        '<network-id>': [{'addr': '172.21.0.88',
                                          'version': 4,
                                          'OS-EXT-IPS:type': 'fixed'}]
                    }}}}

        :param aws_adapter: stored for signature compatibility with the
            AWS variant; not used by this class's own methods.
        :param wormhole_service_port: TCP port of the wormhole agent.
        :return: None
        """
        LOG.debug('start to _init_ WormHoleBusinessHWS, provider_node: %s' % provider_node)
        self.provider_node = provider_node
        self.adapter = aws_adapter
        super(WormHoleBusinessHWS, self).__init__(wormhole_service_port)
        LOG.debug('end to _init_ WormHoleBusinessHWS')
    def get_clients(self):
        """Return one wormhole Client per address of the node."""
        LOG.debug('Start to get clients.')
        ips = self._get_node_private_ips(self.provider_node)
        LOG.debug('private ips: %s' % ips)
        return self._get_hybrid_service_client(ips, self.wormhole_service_port)
    def _get_hybrid_service_client(self, ips, port):
        """Build a wormhole Client for every IP in ``ips``."""
        return [Client(ip, port) for ip in ips]
    def _get_node_private_ips(self, provider_node):
        """Collect every 'addr' under the node's 'addresses' mapping.

        :param provider_node: dict shaped as described in __init__.
        :return: list of IP strings (possibly empty)
        """
        LOG.debug('start to get node private ips for node:%s' % provider_node)
        private_ips = []
        addresses = provider_node['body']['server']['addresses']
        # Original iterated .items() binding the unused key to the name
        # "id" (shadowing the builtin); only the values are needed.
        for address_list in addresses.values():
            for ip_info in address_list:
                tmp_ip = ip_info['addr']
                if tmp_ip:
                    private_ips.append(tmp_ip)
        LOG.debug('end to get node private ips, private_ips: %s' % private_ips)
        return private_ips
"repo_name": "nash-x/hws",
"path": "nova/virt/hws/wormhole_business.py",
"copies": "1",
"size": "15064",
"license": "apache-2.0",
"hash": 6840891618695736000,
"line_mean": 42.4149855908,
"line_max": 156,
"alpha_frac": 0.5485262878,
"autogenerated": false,
"ratio": 3.898550724637681,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4947077012437681,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import time
import traceback
from functools import wraps
from wormholeclient.client import Client
from wormholeclient import constants as wormhole_constants
from jacket.compute.exception import *
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class RetryDecorator(object):
    """Decorator that retries the wrapped function on selected exceptions.

    The decorated function is re-invoked every time one of ``exceptions``
    is raised, sleeping between attempts; the sleep time grows by
    ``inc_sleep_time`` per attempt and is capped at ``max_sleep_time``.
    With ``max_retry_count == -1`` the function is retried indefinitely.
    Exceptions not listed in ``exceptions`` propagate immediately.
    """
    def __init__(self, max_retry_count=-1, inc_sleep_time=5,
                 max_sleep_time=60, exceptions=()):
        """Configure the retry object using the input params.
        :param max_retry_count: maximum number of attempts for the wrapped
                                function when one of ``exceptions`` is
                                caught; -1 means retry forever.
        :param inc_sleep_time: incremental time in seconds added to the
                               sleep time after each failed attempt.
        :param max_sleep_time: max sleep time in seconds beyond which the
                               sleep time will not be incremented.
        :param exceptions: exception class, or tuple of classes, for which
                           the function must be retried.
        """
        self._max_retry_count = max_retry_count
        self._inc_sleep_time = inc_sleep_time
        self._max_sleep_time = max_sleep_time
        self._exceptions = exceptions
        # Kept for interface compatibility; not used by f_retry below.
        self._retry_count = 0
        self._sleep_time = 0
    def __call__(self, f):
        @wraps(f)
        def f_retry(*args, **kwargs):
            retries_left = self._max_retry_count
            delay = self._inc_sleep_time
            # BUG FIX: the original condition ("max_retries > 1" only) made
            # the default max_retry_count=-1 skip both the loop and the
            # final attempt, silently returning None without calling f.
            while retries_left > 1 or retries_left == -1:
                try:
                    return f(*args, **kwargs)
                except self._exceptions:
                    # BUG FIX: format_exc() takes a line-limit int, not an
                    # exception instance; use the implicit current exception.
                    LOG.error('retry times: %s, exception: %s' %
                              (str(self._max_retry_count - retries_left),
                               traceback.format_exc()))
                    time.sleep(delay)
                    if retries_left != -1:
                        retries_left -= 1
                    # BUG FIX: the sleep time was documented as incremental
                    # but never actually increased; grow it, capped.
                    delay = min(delay + self._inc_sleep_time,
                                self._max_sleep_time)
            # Final attempt: let any exception propagate to the caller.
            msg = 'func: %s, retry times: %s, failed' % (
                f.__name__, str(self._max_retry_count))
            LOG.error(msg)
            return f(*args, **kwargs)
        return f_retry
class WormHoleBusiness(object):
    """Facade over a list of wormhole REST clients.

    Every public method forwards to the first client that can execute the
    named function; _run_function_of_clients then pins that client so
    later calls skip the unreachable ones.
    """
    def __init__(self, clients):
        # Ordered list of candidate wormhole clients.
        self.clients = clients
    def get_version(self):
        """Return the version reported by the wormhole service."""
        return self._run_function_of_clients('get_version')
    def restart_container(self, network_info, block_device_info):
        # NOTE: this method used to be defined twice with identical bodies;
        # the duplicate definition was removed.
        return self._run_function_of_clients('restart_container',
                                             network_info=network_info,
                                             block_device_info=block_device_info)
    def start_container(self, network_info, block_device_info):
        return self._run_function_of_clients('start_container',
                                             network_info=network_info,
                                             block_device_info=block_device_info)
    def stop_container(self):
        return self._run_function_of_clients('stop_container')
    def create_container(self, name, image_uuid, injected_files, admin_password, network_info,
                         block_device_info):
        return self._run_function_of_clients('create_container', image_name=name,
                                             image_id=image_uuid, root_volume_id=None,
                                             network_info=network_info,
                                             block_device_info=block_device_info,
                                             inject_files=injected_files,
                                             admin_password=admin_password,
                                             timeout=10)
    def pause(self):
        return self._run_function_of_clients('pause_container')
    def unpause(self):
        return self._run_function_of_clients('unpause_container')
    def inject_file(self, dst_path, src_path=None, file_data=None, timeout=10):
        return self._run_function_of_clients('inject_file', dst_path=dst_path,
                                             src_path=src_path,
                                             file_data=file_data, timeout=timeout)
    def list_volume(self):
        return self._run_function_of_clients('list_volume')
    def attach_volume(self, volume_id, device, mount_device, timeout=10):
        return self._run_function_of_clients('attach_volume', volume_id=volume_id,
                                             device=device, mount_device=mount_device,
                                             timeout=timeout)
    def detach_volume(self, volume_id, timeout=10):
        return self._run_function_of_clients('detach_volume', volume_id=volume_id,
                                             timeout=timeout)
    def attach_interface(self, vif, timeout=10):
        return self._run_function_of_clients('attach_interface', vif=vif, timeout=timeout)
    def detach_interface(self, vif, timeout=10):
        return self._run_function_of_clients('detach_interface', vif=vif, timeout=timeout)
    def create_image(self, image_name, image_id, timeout=10):
        return self._run_function_of_clients('create_image', image_name=image_name,
                                             image_id=image_id, timeout=timeout)
    def image_info(self, image_name, image_id):
        return self._run_function_of_clients('image_info', image_name=image_name,
                                             image_id=image_id)
    def query_task(self, task, timeout=10):
        return self._run_function_of_clients('query_task', task=task, timeout=timeout)
    def status(self):
        return self._run_function_of_clients('status')
    # BUG FIX: exceptions must be a tuple; (RetryException) is just the class.
    @RetryDecorator(max_retry_count=60, inc_sleep_time=5, max_sleep_time=60,
                    exceptions=(RetryException,))
    def _run_function_of_clients(self, function_name, *args, **kwargs):
        """Try ``function_name`` on each client until one succeeds.

        The first client that answers becomes the only entry in
        ``self.clients`` so subsequent calls go straight to it.  Raises
        RetryException (re-run by the decorator) when every client failed
        or returned a falsy result.
        """
        result = None
        tmp_except = Exception('tmp exception when doing function: %s' % function_name)
        for client in self.clients:
            # BUG FIX: getattr without a default raised AttributeError before
            # the intended "no such function" error could ever fire.
            func = getattr(client, function_name, None)
            if func is None:
                raise Exception('There is not such function >%s< in wormhole '
                                'client.' % function_name)
            try:
                result = func(*args, **kwargs)
                # Pin the responsive client for subsequent calls.
                self.clients = [client]
                break
            except Exception as e:  # py2/py3-compatible form of "except X, e"
                tmp_except = e
                continue
        if not result:
            # str(e) instead of the Python-2-only e.message attribute.
            raise RetryException(error_info=str(tmp_except))
        return result
    @RetryDecorator(max_retry_count=60, inc_sleep_time=5, max_sleep_time=60,
                    exceptions=(RetryException,))
    def wait_for_task_finish(self, task):
        """Poll ``task`` until SUCCESS; raise on ERROR/unknown states.

        DOING raises RetryException so the decorator polls again.
        """
        task_finish = False
        if task['code'] == wormhole_constants.TASK_SUCCESS:
            return True
        current_task = self.query_task(task)
        task_code = current_task['code']
        if wormhole_constants.TASK_DOING == task_code:
            LOG.debug('task is DOING, status: %s' % task_code)
            raise RetryException(error_info='task status is: %s' % task_code)
        elif wormhole_constants.TASK_ERROR == task_code:
            LOG.debug('task is ERROR, status: %s' % task_code)
            raise Exception('task error, task status is: %s' % task_code)
        elif wormhole_constants.TASK_SUCCESS == task_code:
            LOG.debug('task is SUCCESS, status: %s' % task_code)
            task_finish = True
        else:
            raise Exception('UNKNOW ERROR, task status: %s' % task_code)
        LOG.debug('task: %s is finished' % task)
        return task_finish
if __name__ == '__main__':
    # Manual smoke test: talk to a wormhole agent on a fixed host/port.
    clients = [Client('10.16.2.77', 7127)]
    wormhole = WormHoleBusiness(clients)
    # BUG FIX: removed the leftover "import pdb;pdb.set_trace()" debugger
    # breakpoint, which would hang any non-interactive run.
    try:
        docker_version = wormhole.get_version()
    except Exception as e:  # py2/py3-compatible form of "except X, e"
        # BUG FIX: format_exc() takes a line-limit int, not an exception.
        error_info = 'docker server is not up, create docker app failed, exception: %s' % \
                     traceback.format_exc()
        raise Exception(error_info)
| {
"repo_name": "HybridF5/jacket",
"path": "jacket/worker/hypercontainer/wormhole_business.py",
"copies": "1",
"size": "9146",
"license": "apache-2.0",
"hash": 5580152488077610000,
"line_mean": 42.7607655502,
"line_max": 108,
"alpha_frac": 0.5770828778,
"autogenerated": false,
"ratio": 4.238183503243744,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5315266381043744,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import unittest
import javabridge
from TASSELpy.TASSELbridge import TASSELbridge
# Ensure a javabridge JVM environment is running before the TASSELpy
# imports below touch it; start the TASSEL bridge on demand.
try:
    try:
        javabridge.get_env()
    except AttributeError:
        print("AttributeError: start bridge")
        TASSELbridge.start()
    except AssertionError:
        print("AssertionError: start bridge")
        TASSELbridge.start()
# BUG FIX: was a bare "except:", which also swallowed SystemExit and
# KeyboardInterrupt; only genuine errors should become RuntimeError.
except Exception:
    raise RuntimeError("Could not start JVM")
from TASSELpy.net.maizegenetics.taxa.TaxaListBuilder import *
from TASSELpy.net.maizegenetics.dna.snp.ImportUtils import ImportUtils
from TASSELpy.data import data_constants
from TASSELpy.java.lang.Object import Object
from TASSELpy.java.util.ArrayList import ArrayList
# Maps short names to JVM class-path strings; presumably consumed by the
# javabridge helpers when constructing Java objects — confirm usage.
java_imports = {'String': 'java/lang/String',
                'Taxon': 'net/maizegenetics/taxa/Taxon'}
class TaxaListBuilderTest(unittest.TestCase):
    """Tests for the TaxaListBuilder wrapper (add / addAll / build)."""
    @classmethod
    def setUpClass(cls):
        """Load the shared hapmap fixture and create a builder."""
        # BUG FIX: the try block used a bare "except:" that also swallowed
        # the type assertion, re-reporting it as "Could not load test data";
        # catch Exception only and assert outside the try.
        try:
            cls.data = ImportUtils.readGuessFormat(data_constants.SHORT_HMP_FILE)
            cls.test = TaxaListBuilder()
        except Exception:
            raise ValueError("Could not load test data")
        assert type(cls.test) is TaxaListBuilder, "TaxaListBuilder constructure error"
    def test_add(self):
        """add() accepts a Taxon and returns the builder (fluent API)."""
        arr = self.test.add(Taxon("Test Bokan"))
        self.assertIsInstance(arr, TaxaListBuilder)
    def test_addAll(self):
        """addAll() accepts a TaxaList, String[], Object[] and collections."""
        arr1 = self.test.addAll(self.data)
        self.assertIsInstance(arr1, TaxaListBuilder)
        string_arr = String.getArray(2)
        string_arr[0] = "aaa"
        string_arr[1] = "bbb"
        arr2 = self.test.addAll(string_arr)
        self.assertIsInstance(arr2, TaxaListBuilder)
        Taxon_arr = Object.getArray(2)
        Taxon_arr[0] = Taxon("BokanTest1")
        Taxon_arr[1] = Taxon("BokanTest2")
        arr3 = self.test.addAll(Taxon_arr)
        self.assertIsInstance(arr3, TaxaListBuilder)
        collection_arr = ArrayList(generic=(Taxon,))
        collection_arr.add(0, Taxon("BokanTest4"))
        collection_arr.add(0, Taxon("BokanTest3"))
        arr4 = self.test.addAll(collection_arr)
        self.assertIsInstance(arr4, TaxaListBuilder)
    def test_build(self):
        """build() produces a TaxaList."""
        # BUG FIX: method was misspelled "test_duild"; unittest discovered
        # it either way, but renamed for clarity.
        arr1 = self.test.build()
        self.assertIsInstance(arr1, TaxaList)
if __name__ == '__main__':
    # exit=False so control returns here and the JVM bridge can be
    # shut down cleanly after the tests run.
    unittest.main(exit=False)
    TASSELbridge.stop()
| {
"repo_name": "er432/TASSELpy",
"path": "TASSELpy/test/net/maizegenetics/taxa/TaxaListBuilderTest.py",
"copies": "1",
"size": "2311",
"license": "bsd-3-clause",
"hash": 1726566225332323600,
"line_mean": 32.0142857143,
"line_max": 90,
"alpha_frac": 0.665512765,
"autogenerated": false,
"ratio": 3.363901018922853,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9514364917284266,
"avg_score": 0.0030097733277175786,
"num_lines": 70
} |
__author__ = 'Administrator'
# API version segments used when composing the URI templates below.
VERSION_1 = 'v1'
VERSION_2 = 'v2'
# Separator used to join URI path segments.
URI_SPE = '/'
# str.format() placeholders substituted by callers of the templates.
REPLACE_SERVER_ID = '{server_id}'
REPLACE_DETACHMENT_ID = '{detachment_id}'
class ECS(object):
    """URI templates for the Elastic Cloud Server (compute) service.

    Two prefixes coexist: the v1 "composite" cloudservers API and the v2
    native OpenStack servers API.  Several of the original comments mixed
    the two up; the paths below are corrected to match the code.
    """
    COMPOSITE_INTERFACE_ECS_PREFIX = "/%s/{project_id}/cloudservers" % VERSION_1
    NATIVE_OPENSTACK_ECS_PREFIX = "/%s/{project_id}/servers" % VERSION_2
    # /v2/{project_id}/servers
    LIST_SERVERS = NATIVE_OPENSTACK_ECS_PREFIX
    # /v2/{project_id}/servers/detail
    LIST_DETAILS = URI_SPE.join([NATIVE_OPENSTACK_ECS_PREFIX, 'detail'])
    # /v2/{project_id}/servers/{server_id}
    GET_DETAIL = URI_SPE.join([NATIVE_OPENSTACK_ECS_PREFIX, REPLACE_SERVER_ID])
    # /v1/{project_id}/cloudservers
    CREATE_SERVER = COMPOSITE_INTERFACE_ECS_PREFIX
    # /v1/{project_id}/cloudservers/flavors
    LIST_FLAVOR = URI_SPE.join([COMPOSITE_INTERFACE_ECS_PREFIX, 'flavors'])
    # /v1/{project_id}/cloudservers/delete
    DELETE_SERVER = URI_SPE.join([COMPOSITE_INTERFACE_ECS_PREFIX, 'delete'])
    # /v2/{project_id}/servers/{server_id}/action
    SERVER_ACTION = URI_SPE.join([NATIVE_OPENSTACK_ECS_PREFIX, REPLACE_SERVER_ID, 'action'])
    # stop/start/reboot share the same action URI (presumably the request
    # body selects the action — confirm against the HWS API docs).
    STOP_SERVER = SERVER_ACTION
    START_SERVER = SERVER_ACTION
    REBOOT_SERVER = SERVER_ACTION
    # /v1/{project_id}/cloudservers/{server_id}/attachvolume
    ATTACHE_VOLUME = URI_SPE.join([COMPOSITE_INTERFACE_ECS_PREFIX, REPLACE_SERVER_ID, 'attachvolume'])
    # /v1/{project_id}/cloudservers/{server_id}/detachvolume/{detachment_id}
    DETACH_VOLUME = URI_SPE.join([COMPOSITE_INTERFACE_ECS_PREFIX, REPLACE_SERVER_ID, 'detachvolume', REPLACE_DETACHMENT_ID])
    # /v2/{project_id}/servers/{server_id}/os-volume_attachments
    GET_SERVER_VOLUME_LIST = URI_SPE.join([NATIVE_OPENSTACK_ECS_PREFIX, REPLACE_SERVER_ID, 'os-volume_attachments'])
class EVS(object):
    """URI templates for the Elastic Volume Service."""
    # NOTE(review): despite the "NATIVE_OPENSTACK" name this uses the
    # composite "cloudvolumes" path — confirm which API it targets.
    NATIVE_OPENSTACK_EVS_PREFIX = '/%s/{project_id}/cloudvolumes' % VERSION_2
    REPLACE_VOLUME_ID = '{volume_id}'
    # /v2/{project_id}/cloudvolumes
    LIST = NATIVE_OPENSTACK_EVS_PREFIX
    # /v2/{project_id}/cloudvolumes
    CREATE_VOLUME = NATIVE_OPENSTACK_EVS_PREFIX
    # /v2/{project_id}/cloudvolumes/{volume_id}
    DELETE_VOLUME = URI_SPE.join([NATIVE_OPENSTACK_EVS_PREFIX, REPLACE_VOLUME_ID])
    # /v2/{project_id}/cloudvolumes/{volume_id} — same template as delete
    # (original comment wrongly said /volumes/); presumably the HTTP
    # method distinguishes them.
    GET_VOLUME_DETAIL = URI_SPE.join([NATIVE_OPENSTACK_EVS_PREFIX, REPLACE_VOLUME_ID])
class IMS(object):
    """URI templates for the Image Management Service."""
    NATIVE_OPENSTACK_IMS_PREFIX = '/%s/images' % VERSION_2
    COMPOSITE_INTERFACE_IMS_PREFIX = '/%s/cloudimages' % VERSION_2
    REPLACE_IMAGE_ID = '{image_id}'
    # /v2/images
    LIST = NATIVE_OPENSTACK_IMS_PREFIX
    # /v2/cloudimages/action
    IMAGE_ACTION = URI_SPE.join([COMPOSITE_INTERFACE_IMS_PREFIX, 'action'])
    # /v2/cloudimages/action — same endpoint as IMAGE_ACTION (presumably
    # the request body selects "create"; confirm against the API docs).
    CREATE_IMAGE = IMAGE_ACTION
    # /v2/images/{image_id}
    DELETE_IMAGE = URI_SPE.join([NATIVE_OPENSTACK_IMS_PREFIX, REPLACE_IMAGE_ID])
class VBS(object):
    """URI templates for the Volume Backup Service."""
    # /v2/{project_id}/cloudbackups (original comment had a stray %s)
    COMPOSITE_INTERFACE_VBS_PREFIX = '/%s/{project_id}/cloudbackups' % VERSION_2
    REPLACE_BACKUP_ID = '{backup_id}'
    # /v2/{project_id}/cloudbackups
    CREATE_BACKUP = COMPOSITE_INTERFACE_VBS_PREFIX
    # /v2/{project_id}/cloudbackups/{backup_id}
    DELETE_BACKUP = URI_SPE.join([COMPOSITE_INTERFACE_VBS_PREFIX, REPLACE_BACKUP_ID])
class VPC(object):
    """URI templates for the VPC (network) service."""
    COMPOSITE_INTERFACE_VPC_PREFIX = '/%s/{project_id}/vpcs' % VERSION_1
    COMPOSITE_INTERFACE_SUBNET_PREFIX = '/%s/{project_id}/subnets' % VERSION_1
    REPLACE_VPC_ID = '{vpc_id}'
    REPLACE_SUBNET_ID = '{subnet_id}'
    # /v1/{project_id}/vpcs
    LIST = COMPOSITE_INTERFACE_VPC_PREFIX
    # /v1/{project_id}/vpcs/{vpc_id}
    GET_VPC_DETAIL = URI_SPE.join([COMPOSITE_INTERFACE_VPC_PREFIX, REPLACE_VPC_ID])
    # /v1/{project_id}/subnets
    LIST_SUBNETS = COMPOSITE_INTERFACE_SUBNET_PREFIX
    # /v1/{project_id}/subnets/{subnet_id}
    GET_SUBNET_DETAIL = URI_SPE.join([COMPOSITE_INTERFACE_SUBNET_PREFIX, REPLACE_SUBNET_ID])
    # /v1/{project_id}/subnets
    CREATE_SUBNET = COMPOSITE_INTERFACE_SUBNET_PREFIX
if __name__ == '__main__':
    # Ad-hoc smoke test: substitute sample ids into a few templates and
    # print the resulting URIs (Python 2 print statements).
    project_id = 'project01'
    # NOTE(review): server_id below is unused; the literal is passed instead.
    server_id = 'server012'
    print EVS.LIST.format(project_id=project_id)
    print EVS.CREATE_VOLUME.format(project_id=project_id)
    print EVS.DELETE_VOLUME.format(project_id=project_id, volume_id='volume001')
    print ECS.GET_SERVER_VOLUME_LIST.format(project_id='project01', server_id='server012')
| {
"repo_name": "nash-x/hws",
"path": "hwcloud/hws_service/uri.py",
"copies": "1",
"size": "4269",
"license": "apache-2.0",
"hash": 3775147479689216000,
"line_mean": 30.3897058824,
"line_max": 124,
"alpha_frac": 0.688451628,
"autogenerated": false,
"ratio": 2.904081632653061,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9015284581621505,
"avg_score": 0.015449735806311263,
"num_lines": 136
} |
__author__ = 'Administrator'
from django.db import models
class Product(models.Model):
    """A sellable product in the catalogue."""
    # Unique display name.
    title = models.CharField(max_length=100, blank=False, unique=True)
    description = models.TextField()
    # External URL of the product image (not an uploaded file).
    image_url = models.URLField(max_length=200)
    # NOTE(review): only one decimal place — prices usually need two;
    # confirm this is intentional.
    price = models.DecimalField(max_digits=8, decimal_places=1)
    # Date from which the product can be ordered.
    date_available = models.DateField()
    def __str__(self):
        return self.title
class ProductItem(models.Model):
    """One cart/order line: a product, its quantity and its unit price."""
    # NOTE(review): ForeignKey without on_delete — pre-Django-2.0 style;
    # confirm the target Django version.
    product = models.ForeignKey(to=Product)
    # Price per unit at the time the item was added (Cart.add_product
    # copies it from Product.price).
    unit_price = models.DecimalField(max_digits=8, decimal_places=1)
    quantity = models.IntegerField()
class People(models.Model):
    """A person with a name and an age (demo model)."""
    name = models.CharField(max_length=20)
    age = models.IntegerField()
    # An abstract Meta was tried and left disabled, so this model is
    # concrete and gets its own table.
    #class Meta:
    # abstract = True
class Male(People):
    """A male person extending People.

    NOTE(review): People is a concrete model, so Django treats this as
    multi-table inheritance — confirm an abstract base wasn't intended.
    """
    sex = models.BooleanField()
class Cart(object):
    """Simple in-memory shopping cart (plain object, not a Django model).

    Tracks ProductItem entries plus a running total price.
    """
    def __init__(self, items=None, total=0):
        """Create a cart.

        :param items: optional initial list of items.  BUG FIX: the
            original signature used a mutable default (items=[]), so every
            Cart created without arguments shared the same list.
        :param total: initial total price (defaults to 0).
        """
        self.items = items if items is not None else []
        self.total_price = total
    def add_product(self, product):
        """Add one unit of ``product``, merging with an existing line item."""
        self.total_price += product.price
        for item in self.items:
            if item.product.id == product.id:
                item.quantity += 1
                return
        self.items.append(ProductItem(product=product, unit_price=product.price, quantity=1))
class Place(models.Model):
    """A named location with an address; concrete base for the models below."""
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=80)
    def __str__(self):
        return self.name
class Restaurant(Place):
    """A Place serving food; inherits name/address from Place."""
    serves_hot_dogs = models.BooleanField()
    serves_pizza = models.BooleanField()
class Supplier(Place):
    """A Place that supplies one or more Restaurants."""
    customers = models.ManyToManyField(Restaurant)
| {
"repo_name": "HarrisonHDU/myerp",
"path": "apps/depot/models.py",
"copies": "1",
"size": "1639",
"license": "mit",
"hash": 9124463774526781000,
"line_mean": 24.0158730159,
"line_max": 93,
"alpha_frac": 0.6351433801,
"autogenerated": false,
"ratio": 3.911694510739857,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5046837890839857,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from django.http import HttpResponse
from django.utils import simplejson
from django.shortcuts import render_to_response
from datetime import datetime
from survey.forms import *
from util import *
from survey.models import *
def showIP(request):
    """Show the client's IP and, on POST, how long the page was open.

    GET: store the page-load time in the session, render showIP.html.
    POST: return JSON with the client IP and the elapsed time since the
    stored start_time.
    """
    if request.POST:
        html = "Your IP is %s" % request.META['REMOTE_ADDR']
        # NOTE(review): start_time is None if the session expired or the
        # POST arrives without a prior GET — the subtraction would raise;
        # confirm desired handling.
        start_time = request.session.get('start_time')
        duration = str(datetime.now() - start_time)
        payload = {"IP": html, "message": duration}
        return HttpResponse(simplejson.dumps(payload), mimetype='application/javascript')
    request.session['start_time'] = datetime.now()
    return render_to_response("showIP.html", {})
def result(request):
    """Render the survey page (GET) or return answers + duration as JSON (POST)."""
    if request.POST:
        answer1 = request.POST.get("answer1")
        answer2 = request.POST.get("answer2")
        answer3 = request.POST.get("answer3")
        start_time = request.session.get('start_time')
        # Drop the microseconds from "H:MM:SS.ffffff".
        duration = str(datetime.now() - start_time).split('.')[0]
        html = ""
        html += "The answer of question %d is %s <br />" % (1, answer1)
        html += "The answer of question %d is %s <br />" % (2, answer2)
        html += "The answer of question %d is %s <br />" % (3, answer3)
        html += "The total time you used is %s <br />" % duration
        payload = {"result": html}
        return HttpResponse(simplejson.dumps(payload), mimetype='application/javascript')
    return render_to_response("showIP.html", {})
def register(request):
    """Handle the registration form; return the cleaned fields as JSON.

    GET (or an invalid POST): render the registration page with the form.
    """
    if request.POST:
        form = RegistrationForm(request.POST)
        if form.is_valid():
            # NOTE(review): echoing the raw password back in the JSON
            # response is a security smell — confirm this is test-only.
            payload = {"username": form.cleaned_data.get('username'),
                       "password": form.cleaned_data.get('password1'),
                       "email": form.cleaned_data.get('email')}
            return HttpResponse(simplejson.dumps(payload), mimetype='application/javascript')
        # An invalid form falls through and is re-rendered with its errors.
    else:
        form = RegistrationForm()
    return render_to_response('cxq_registration.html', {"register_form": form})
def add_component(request,surveyID=0):
    """Render the survey editor, or (POST) return one question widget's HTML.

    POST: bumps the per-session question counter and returns a JSON dict
    {"content": <html>} for a single question widget of the requested
    question_type; when questionID is posted, the stored title/help text
    are loaded for editing.
    GET: resets the counter and renders the editor for surveyID (0 = new).
    """
    if request.POST:
        # Per-session counter of created widgets; stored as a string.
        request.session['question_created_total'] = str(int(request.session['question_created_total'])+1)
        question_no = int(request.session.get("question_created_total")) #This is used to group selections, not for the index.
        # Placeholder texts shown for a brand-new question.
        question_description = 'Click here to change the description'
        question_helptext = "Click here to add help text"
        values = "sample1@#@sample2@#@sample3@#@sample4"
        if request.POST.get("questionID"):
            # Editing an existing question: load its stored fields.
            questionID = int(request.POST.get("questionID"))
            question = Question.objects.get(id=questionID)
            question_description = question.title
            question_helptext = question.help_text
            if question.type in ("multiplechoice","checkbox"):
                if question.type == "multiplechoice":
                    choices = question.multiplechoicequestion.choices.all()
                if question.type == "checkbox":
                    choices = question.checkboxquestion.choices.all()
                value = ""
                for choice in choices:
                    value += "%s@#@" % choice.label
                value = value[0:-3]
                # NOTE(review): 'value' (the "@#@"-joined stored labels) is
                # built but never used — show_checkbox/show_mcq below still
                # receive the sample 'values'. Looks like a bug; confirm.
        html = "<div class='singleQuestionDiv'>"
        html += "<span class='question_no'>Q:</span>"
        html += "<span class='question_description editable'>%s</span><br />" % question_description
        html += "<span class='question_helptext editable hideable'>%s</span><br />" % question_helptext
        # NOTE: 'type' shadows the builtin here.
        type = request.POST.get("question_type")
        if (type=="paragraph"):
            html += show_paragraph()
        elif (type=="numeric"):
            html += show_numeric()
        elif (type=="checkbox"):
            # Inputs are grouped by the session-wide counter.
            group_name = question_no
            html += show_checkbox(group_name,values)
        elif (type=="multiplechoice"):
            group_name = question_no
            html += show_mcq(group_name,values)
        html+="</div>"
        dict = {"content": html}
        return HttpResponse(simplejson.dumps(dict), mimetype='application/javascript')
    # GET: reset the counter and render the editor page.
    request.session['question_created_total']='0'
    template = "cxq_edit_survey.html"
    survey = ""
    title = "New Survey(Click to change)"
    description = "Add description here"
    surveyID = int(surveyID)
    if surveyID != 0 :
        # Editing an existing survey: prefill title/description.
        survey = Survey.objects.get(id=surveyID)
        title = survey.title
        description = survey.description
    dict = {'surveyID':surveyID, 'survey':survey, "title":title, "description":description}
    return render_to_response(template,dict)
def create_survey(request):
    """Create a Survey from POSTed title/description; return its id as JSON.

    NOTE(review): non-POST requests fall through and return None (a 500
    in Django) — confirm whether a redirect/405 was intended.
    """
    if request.POST:
        survey = Survey(title=request.POST.get("survey_title"))
        survey.description = request.POST.get("survey_description")
        survey.save()
        payload = {"surveyID": survey.id}
        return HttpResponse(simplejson.dumps(payload), mimetype='application/javascript')
def delete_survey(request):
    """Delete the Survey whose id arrives in POST; reply with an empty JSON object."""
    if request.POST:
        target_id = int(request.POST.get("surveyID"))
        Survey.objects.get(id=target_id).delete()
        return HttpResponse(simplejson.dumps({}), mimetype='application/javascript')
def save_survey(request,surveyID):
    """Persist one question (POSTed by the survey editor) into the survey.

    POST fields: question_type, question_no, question_helptext,
    question_title, selections ('@#@'-separated choice labels) and
    attributes ('@#@'-separated type-specific settings).
    Returns the (possibly newly created) survey id as JSON.
    """
    if request.POST:
        question_type = request.POST.get("question_type")
        question_no = request.POST.get("question_no")
        question_helptext = request.POST.get("question_helptext")
        question_title = request.POST.get("question_title")
        selections = request.POST.get("selections")
        attributes = request.POST.get("attributes")
        print question_type
        # surveyID == 0 means the survey has not been saved yet: create a
        # placeholder so the question has something to attach to.
        if int(surveyID)==0:
            survey = Survey(title="no title")
            survey.save()
            surveyID = survey.id
        # Instantiate the concrete question subclass for the requested type.
        if question_type=="paragraph":
            question = ParagraphQuestion()
        elif question_type == "numeric":
            question = NumericQuestion()
        elif question_type == "multiplechoice":
            question = MultipleChoiceQuestion()
        elif question_type == "checkbox":
            question = CheckboxQuestion()
        else :
            # Unknown type: give up silently (implicitly returns None).
            return
        # Fields shared by every question type.
        question.survey = Survey.objects.get(id=surveyID)
        question.id_in_survey = question_no
        question.title = question_title
        question.help_text = question_helptext
        question.max_no_characters = 0
        print "1"
        # First save assigns a primary key so choice rows can reference it.
        question.save()
        print "2"
        # Type-specific attributes / choice rows.
        if question_type == "paragraph":
            attributes_list = attributes.split("@#@")
            question.max_no_characters = int(attributes_list[0])
        elif question_type == "numeric":
            attributes_list = attributes.split("@#@")
            question.max_value = int(attributes_list[0])
            question.min_value = int(attributes_list[1])
        elif question_type == "multiplechoice":
            choices = selections.split("@#@")
            # Drops the trailing element (selections appears to end with the
            # separator — confirm against the client-side serializer).
            choices.pop()
            count = 0
            for choice_label in choices:
                count += 1
                choice = MultipleChoice()
                choice.question = question
                choice.label = choice_label
                choice.id_in_question = count
                choice.save()
        elif question_type == "checkbox":
            choices = selections.split("@#@")
            # Same trailing-separator handling as the multiplechoice branch.
            choices.pop()
            count = 0
            for choice_label in choices:
                count += 1
                choice = CheckboxChoice()
                choice.question = question
                choice.label = choice_label
                choice.id_in_question = count
                choice.save()
        else :
            # Unreachable: question_type was already validated above.
            return
        print "3"
        # Second save persists the type-specific attributes set above.
        question.save()
        print "4"
        dict = {"surveyID":surveyID}
        return HttpResponse(simplejson.dumps(dict), mimetype='application/javascript')
def view_survey(request, view_key, *args, **kwargs):
    """Render the respondent page for a survey and remember the start time in the session."""
    target = Survey.objects.get(id=int(view_key))
    ordered_questions = target.questions.order_by('id_in_survey')
    request.session['dt_start'] = datetime.now()
    context = {
        'survey': target,
        'questions': ordered_questions,
        'dt_start': datetime.now(),
    }
    return render_to_response('respondent.html', context)
def multiajax(request):
    """AJAX helper: when POSTed, echo the 'current' step back as JSON;
    otherwise render the multi-ajax demo page.

    Cleanup: removed the unused ``total`` local and the dead commented-out
    code that shadowed earlier experiments.
    """
    template = "cxq_multiajax.html"
    data = {"current": "1"}
    if request.POST:
        current = request.POST.get("current")
        payload = {"current": current}
        return HttpResponse(simplejson.dumps(payload), mimetype='application/javascript')
    return render_to_response(template, data)
def create_response(request):
    """Create a Response row for a survey submission and return its id as JSON.

    dt_start was stored in the session by view_survey(); dt_end is now.
    The bare prints are debug tracing left as-is.
    """
    if request.POST:
        resp = Response()
        # Respondent's IP, taken straight from the WSGI environ.
        resp.ip_address = request.META['REMOTE_ADDR']
        print resp.ip_address
        resp.dt_start= request.session.get('dt_start')
        print resp.dt_start
        resp.dt_end = datetime.now()
        print resp.dt_end
        resp.survey = Survey.objects.get(id=int(request.POST.get("surveyID")))
        print resp.survey
        resp.save()
        dict = {"responseID":resp.id}
        return HttpResponse(simplejson.dumps(dict),mimetype='application/javascript')
def response_survey(request,responseID):
    """Persist one Answer row for an existing Response; reply with an empty JSON object."""
    if request.POST:
        ans = Answer()
        ans.response = Response.objects.get(id=responseID)
        ans.id_in_response = int(request.POST.get("id_in_response"))
        ans.type = request.POST.get("type")
        ans.value = request.POST.get("value")
        ans.save()
        return HttpResponse(simplejson.dumps({}), mimetype='application/javascript')
def data_table(request):
    """Partial DataTables (1.9-style) server-side endpoint.

    Parses the paging (iDisplayStart/iDisplayLength) and sorting
    (iSortingCols/iSortCol_N/sSortDir_N) GET parameters, but currently only
    echoes the POSTed 'current' value back as JSON — the computed
    sLimit/sOrder values are not yet fed into a query.
    """
    if request.POST:
        aColumns = ["engine", 'browser', 'platform', 'version', 'grade']
        sIndexColumn = "id"
        sLimit = ""
        # Paging window requested by DataTables ('-1' means "show all").
        if request.GET.get('iDisplayStart') and request.GET.get('iDisplayStart') != '-1':
            sLimitStart = int(request.GET.get('iDisplayStart'))
            sLimitEnd = int(request.GET.get('iDisplayLength')) + sLimitStart
        if request.GET.get('iSortCol_0'):
            sOrder = ""
            iSortingCols = int(request.GET.get('iSortingCols'))
            for i in range(iSortingCols):
                col_index = request.GET.get('iSortCol_' + str(i))
                if request.GET.get('bSortable_' + col_index):
                    # BUG FIX: the column lookup previously used the key
                    # 'iSortCol<i>' (missing underscore), which always
                    # returned None and made int() raise.
                    sOrder += "'" + aColumns[int(request.GET.get('iSortCol_' + str(i)))] + "" \
                        "'" + request.GET.get('sSortDir_' + str(i)) + ", "
            sOrder = sOrder.rstrip(', ')
        total = request.POST.get("total")
        current = request.POST.get("current")
        payload = {"current": current}
        return HttpResponse(simplejson.dumps(payload), mimetype='application/javascript')
    template = ""
    data = {}
    return render_to_response(template, data)
| {
"repo_name": "sfdye/ntusurvey",
"path": "survey/cxq_views.py",
"copies": "2",
"size": "11779",
"license": "mit",
"hash": -2122918005494047000,
"line_mean": 40.3705035971,
"line_max": 126,
"alpha_frac": 0.5810340436,
"autogenerated": false,
"ratio": 4.178432068109259,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5759466111709259,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from django.utils import encoding
def intval(x, default=0):
    """Coerce x to int; fall back to `default` when x is empty or not all digits.

    Returns: int
    """
    text = str(x) if x else ''
    return int(text) if text.isdigit() else default
def start_limit(request, limit_default=0):
    """Read 'start'/'limit' from the request (POST wins over GET).

    Return: (start, start + limit)
    """
    raw_start = request.POST.get('start', request.GET.get('start'))
    raw_limit = request.POST.get('limit', request.GET.get('limit'))
    begin = intval(raw_start, default=0)
    span = intval(raw_limit, default=limit_default)
    return begin, begin + span
def callback(msg):
    """Build the standard success responseText dict."""
    return dict(success=True, msg=msg)
def errorcallback(msg='ERROR'):
    """Build the standard error responseText dict.

    Returns: dict
    """
    return dict(success=False, msg=msg)
def curpage(total, data_itmes):
    """Payload for a paginated store: {total: ..., items: ...}.

    Returns: dict
    """
    return dict(total=total, items=data_itmes)
def items(value):
    """Payload for a store without pagination; materializes value into a list.

    Returns: dict
    """
    return dict(items=list(value))
def getparam(request, *params):
    """Collect the values of the named parameters into a dict (GET first, then POST).

    Missing parameters are reported on stdout and omitted from the result.
    Returns: dict
    """
    kv = {}
    for name in params:
        if name in request.GET:
            source = request.GET
        elif name in request.POST:
            source = request.POST
        else:
            print('parameter {} lost'.format(name))
            continue
        kv[name] = encoding.smart_text(source.get(name), strings_only=True)
    return kv
"repo_name": "HarrisonHDU/myerp",
"path": "apps/sims/tools/helper.py",
"copies": "1",
"size": "1711",
"license": "mit",
"hash": -5844547648495078000,
"line_mean": 21.6142857143,
"line_max": 81,
"alpha_frac": 0.5741974561,
"autogenerated": false,
"ratio": 3.4978813559322033,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4572078812032203,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
from Tkinter import *
class Application(Frame):
def say_hi(self):
print "hi there, everyone!"
def createWidgets(self):
self.QUIT = Button(self)
self.QUIT["text"] = "QUIT"
self.QUIT["fg"] = "red"
self.QUIT["command"] = self.quit
self.QUIT.pack({"side": "left"})
self.hi_there = Button(self)
self.hi_there["text"] = "Hello",
self.hi_there["command"] = self.say_hi
self.hi_there.pack({"side": "left"})
self.fred = Button(self, fg = "red", bg = "blue")
self.fred['text'] = 'Nash smile'
self.fred['command'] = self.say_hi
self.fred.pack({"side": "left"})
def __init__(self, master=None):
Frame.__init__(self, master)
self.pack()
self.createWidgets()
# --- Script entry: build the root window and run the demo ---
root = Tk()
root.title('My first Py GUI Programing')
# Attach an (empty) menu bar to the root window.
mainmenu=Menu(root)
root['menu']=mainmenu
app = Application(master=root)
# Blocks until the window is closed or QUIT is pressed.
app.mainloop()
root.destroy() | {
"repo_name": "nash-x/mini-game",
"path": "tetris/test_tkinter.py",
"copies": "1",
"size": "1025",
"license": "apache-2.0",
"hash": -806811758441642500,
"line_mean": 25.0263157895,
"line_max": 57,
"alpha_frac": 0.5502439024,
"autogenerated": false,
"ratio": 3.349673202614379,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4399917105014379,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import json
from django.core.serializers import serialize
from django.db.models.query import QuerySet, ValuesQuerySet
from django.db.models import Model
import decimal
import sys
import datetime
from erp import settings
from django import http
class DjangoJSONEncoder(json.JSONEncoder):
    """JSON encoder that understands Decimal, date/time types and Django ORM objects."""

    def default(self, o):
        # Decimals become plain strings to avoid float precision loss.
        if isinstance(o, decimal.Decimal):
            return str(o)
        # NOTE: datetime must be tested before date (datetime subclasses date).
        for klass, fmt in ((datetime.datetime, '%Y-%m-%d %H:%M:%S'),
                           (datetime.date, '%Y-%m-%d'),
                           (datetime.time, '%H:%M:%S')):
            if isinstance(o, klass):
                return o.strftime(fmt)
        if isinstance(o, (ValuesQuerySet, QuerySet)):
            return list(o)
        if isinstance(o, Model):
            return {f.name: getattr(o, f.name) for f in o._meta.fields}
        return super(DjangoJSONEncoder, self).default(o)
def Ajax(fn):
    """Decorator for view functions: serialize the return value with
    DjangoJSONEncoder and wrap it in an application/json response.
    Outside DEBUG mode, only AJAX requests are accepted (404 otherwise)."""
    def wrapper(*args, **kwargs):
        request = args[0]
        if not (settings.DEBUG or request.is_ajax()):
            raise http.Http404
        payload = json.dumps(fn(*args, **kwargs), cls=DjangoJSONEncoder)
        return http.HttpResponse(payload, content_type="application/json")
    return wrapper
| {
"repo_name": "HarrisonHDU/myerp",
"path": "apps/sims/tools/tool.py",
"copies": "1",
"size": "1403",
"license": "mit",
"hash": -4942612856945218000,
"line_mean": 31.4047619048,
"line_max": 79,
"alpha_frac": 0.612259444,
"autogenerated": false,
"ratio": 4.066666666666666,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5178926110666666,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import json
from django.db.models.query import QuerySet, ValuesQuerySet
from django.db.models import Model
import decimal
import datetime
from erp import settings
from django import http
class DjangoJSONEncoder(json.JSONEncoder):
    """json.JSONEncoder extension for Decimal, date/time types and Django ORM objects."""

    def default(self, o):
        if isinstance(o, decimal.Decimal):
            # String form keeps full decimal precision.
            return str(o)
        # datetime is checked before date because datetime subclasses date.
        if isinstance(o, datetime.datetime):
            return o.strftime('%Y-%m-%d %H:%M:%S')
        if isinstance(o, datetime.date):
            return o.strftime('%Y-%m-%d')
        if isinstance(o, datetime.time):
            return o.strftime('%H:%M:%S')
        if isinstance(o, ValuesQuerySet) or isinstance(o, QuerySet):
            return list(o)
        if isinstance(o, Model):
            field_names = [f.name for f in o._meta.fields]
            return dict((name, getattr(o, name)) for name in field_names)
        return super(DjangoJSONEncoder, self).default(o)
def Ajax(fn):
    """Decorator: JSON-serialize the wrapped view's result.

    When DEBUG is off, non-AJAX requests get a 404.
    """
    def wrapper(*args, **kwargs):
        req = args[0]
        allowed = settings.DEBUG or req.is_ajax()
        if not allowed:
            raise http.Http404
        body = json.dumps(fn(*args, **kwargs), cls=DjangoJSONEncoder)
        return http.HttpResponse(body, content_type="application/json")
    return wrapper
def analysis_iterable_object(iterable_obj, rulers, item_type='dict'):
    """Parse every item of iterable_obj according to `rulers` and return a list.

    Each ruler is a (key, spec) two-tuple where spec may be:
      - a callable: invoked with the item to produce the value,
      - a non-string constant (int, bool, ...): used as-is,
      - a string starting with '__CONST__': the remainder is a string constant,
      - any other string: the name of an attribute read from the item.

    item_type='dict' builds dict elements (JSON-style rows);
    item_type='list' builds list elements (Array-style rows).

    Example rulers:
        (
            ('id', 'id'),
            ('name', '__CONST__Kobe'),
            ('related_obj_name', lambda obj: getattr(getattr(obj, 'related_id'), 'related_name')),
            ('is_delete', True),
        )
    """
    const_prefix = '__CONST__'

    def store(container, key, value):
        # dict containers are keyed; list containers just append in ruler order.
        if isinstance(container, dict):
            container[key] = value
        elif isinstance(container, list):
            container.append(value)

    parsed = list()
    for item in iterable_obj:
        element = {} if item_type == 'dict' else []
        for key, spec in rulers:
            if callable(spec):
                # Callable spec: computed value.
                store(element, key, spec(item))
            elif not isinstance(spec, str):
                # Non-string constant (int, bool, ...).
                store(element, key, spec)
            elif spec.startswith(const_prefix):
                # String constant marked with the __CONST__ prefix.
                store(element, key, spec[len(const_prefix):])
            else:
                # Attribute name on the item.
                store(element, key, getattr(item, spec))
        parsed.append(element)
    return parsed
"repo_name": "HarrisonHDU/myerp",
"path": "apps/myerp/tools/tool.py",
"copies": "1",
"size": "3195",
"license": "mit",
"hash": -464234936598865860,
"line_mean": 31.0348837209,
"line_max": 93,
"alpha_frac": 0.551602677,
"autogenerated": false,
"ratio": 2.9758909853249476,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40274936623249474,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
import os
class GtConfig:
    """Configuration for the Getui (gexin) push SDK.

    Every knob is read from an environment variable at call time; the second
    argument of os.getenv() is the fallback used when the variable is unset.
    """

    def __init__(self):
        pass

    @staticmethod
    def isPushSingleBatchAsync():
        # Whether single-batch push requests are issued asynchronously.
        return os.getenv("gexin_pushSingleBatch_needAsync", False)

    @staticmethod
    def isPushListAsync():
        return os.getenv("gexin_pushList_needAsync", False)

    @staticmethod
    def isPushListNeedDetails():
        # Honors the newer key first, then the legacy one (see getProperty).
        return GtConfig.getProperty("gexin_pushList_needDetails", "needDetails", False)

    @staticmethod
    def getHttpProxyIp():
        return os.getenv("gexin_http_proxy_ip", None)

    @staticmethod
    def getHttpProxyPort():
        return os.getenv("gexin_http_proxy_port", 80)

    @staticmethod
    def getSyncListLimit():
        return os.getenv("gexin_pushList_syncLimit", 1000)

    @staticmethod
    def getAsyncListLimit():
        return os.getenv("gexin_pushList_asyncLimit", 10000)

    @staticmethod
    def getHttpConnectionTimeOut():
        return os.getenv("gexin_http_connection_timeout", 60)

    @staticmethod
    def getHttpSoTimeOut():
        return os.getenv("gexin_http_so_timeout", 30)

    @staticmethod
    def getHttpTryCount():
        return os.getenv("gexin_http_tryCount", 3)

    @staticmethod
    def getHttpInspectInterval():
        return os.getenv("gexin_http_inspect_interval", 60)

    @staticmethod
    def getDefaultDomainUrl(useSSL):
        """Return the list of API endpoints for the requested scheme.

        When gexin_default_domainurl is unset or blank, built-in defaults for
        the scheme are returned; otherwise the comma-separated value is
        filtered to entries matching the scheme (scheme-less entries get an
        https:// prefix when useSSL is true).
        """
        configured = os.getenv("gexin_default_domainurl", None)
        if configured is None or "" == configured.strip():
            if useSSL:
                return ["https://cncapi.getui.com/serviceex",
                        "https://telapi.getui.com/serviceex",
                        "https://api.getui.com/serviceex",
                        "https://sdk1api.getui.com/serviceex",
                        "https://sdk2api.getui.com/serviceex",
                        "https://sdk3api.getui.com/serviceex"]
            return ["http://sdk.open.api.igexin.com/serviceex",
                    "http://sdk.open.api.gepush.com/serviceex",
                    "http://sdk.open.api.getui.net/serviceex",
                    "http://sdk1.open.api.igexin.com/serviceex",
                    "http://sdk2.open.api.igexin.com/serviceex",
                    "http://sdk3.open.api.igexin.com/serviceex"]
        hosts = list()
        for candidate in configured.split(','):
            if candidate.startswith("https://") and not useSSL:
                continue
            if candidate.startswith("http://") and useSSL:
                continue
            if not candidate.startswith("http") and useSSL:
                candidate = "https://" + candidate
            hosts.append(candidate)
        return hosts

    @staticmethod
    def getSDKVersion():
        return "4.0.1.0"

    @staticmethod
    def getProperty(oldKey, newKey, defaultValue):
        """Look up newKey first, then oldKey; fall back to defaultValue."""
        for key in (newKey, oldKey):
            value = os.getenv(key)
            if value is not None:
                return value
        return defaultValue
| {
"repo_name": "jerryjobs/thirdpartPushSystem",
"path": "push/getui/GtConfig.py",
"copies": "2",
"size": "3235",
"license": "apache-2.0",
"hash": 6653030507669488000,
"line_mean": 29.7156862745,
"line_max": 87,
"alpha_frac": 0.5740340031,
"autogenerated": false,
"ratio": 4.053884711779449,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005404274069178232,
"num_lines": 102
} |
__author__ = 'Administrator'
import sqlite3
import sys
import os
import traceback
import time
from log import LOG
# Default retry policy for DB operations: up to 50 attempts, 0.1 s apart.
TIMES = 50
INTERVAL = 0.1
def retry(times, interval):
    """Decorator factory: retry the wrapped DB call on any exception.

    Retries up to `times` times, sleeping `interval` seconds between
    attempts; once the budget is exhausted the last exception is re-raised.
    """
    def _wrapper(f):
        def __wrapper(*args, **kwargs):
            # Number of retries performed so far.
            timer = 0
            while(True):
                try:
                    # Guard is always true when reached: timer only grows in
                    # the except branch and the raise below fires first.
                    if timer <= times:
                        result = f(*args, **kwargs)
                        return result
                except Exception, e:
                    LOG.debug('Do DB action Exception: %s' % traceback.format_exc())
                    if timer < times:
                        timer += 1
                        time.sleep(interval)
                        LOG.error('Start to retry to do db action, TIME: %s' % timer)
                        continue
                    else:
                        # Retry budget exhausted: surface the failure.
                        LOG.error('Do DB Exception: %s' % traceback.format_exc())
                        raise e
        return __wrapper
    return _wrapper
def close_connection():
    """Decorator factory for DatabaseManager methods: always close the
    shared connection after the call, on success and on error alike.

    Fix: the original caught Exception only to re-raise it with ``raise e``,
    which rebuilds the traceback (losing the original one) and used
    Python-2-only except syntax. A plain try/finally is equivalent and lets
    every exception propagate unchanged.
    """
    def _wrapper(func):
        def __wrapper(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            finally:
                # Runs on both the return and the exception path.
                self.close()
        return __wrapper
    return _wrapper
class DatabaseManager(object):
    """sqlite-backed store of cascading <-> cascaded resource id mappings.

    Tables: server_id_mapping, server_id_name_mapping, image_id_mapping,
    flavor_id_mapping and volume_mapping (which additionally tracks the
    cascaded backup and image ids of a volume). Every public data method is
    wrapped with @retry (transient-failure retries) and @close_connection
    (connection teardown after each call).

    Fix: update_cascaded_backup_in_volume_mapping and
    update_cascaded_image_in_volume_mapping previously wrote the backup /
    image id into the cascading_volume_id column and filtered on
    cascaded_volume_id — inconsistent with the volume_mapping schema and
    with get_cascaded_backup()/get_cascaded_backup_image(), which read
    cascaded_backup_id / cascaded_image_id WHERE cascading_volume_id.
    """

    # One connection shared class-wide; opened lazily by connect() and torn
    # down by close().
    conn = None

    def __init__(self):
        self.HWS_GATEWAY_DB = 'hws_gateway.db'
        self.DB_STORE_PATH = "/home/sqlite_db"
        # DDL / DML templates for each mapping table.
        self.CREATE_TABLE_SERVER_ID_MAPPING = \
            '''CREATE TABLE server_id_mapping(cascading_server_id text, cascaded_server_id text)'''
        self.INSERT_SERVER_ID_MAPPING = \
            '''INSERT INTO server_id_mapping(cascading_server_id, cascaded_server_id) VALUES (?,?)'''
        self.CREATE_TABLE_SERVER_ID_NAME_MAPPING = \
            '''CREATE TABLE server_id_name_mapping(cascading_server_id text, cascaded_server_name text)'''
        self.INSERT_SERVER_ID_NAME_MAPPING = \
            '''INSERT INTO server_id_name_mapping(cascading_server_id, cascaded_server_name) VALUES (?,?)'''
        self.CREATE_TABLE_IMAGE_ID_MAPPING = \
            '''CREATE TABLE image_id_mapping(cascading_image_id text, cascaded_image_id text)'''
        self.INSERT_IMAGE_ID_MAPPING = \
            '''INSERT INTO image_id_mapping(cascading_image_id, cascaded_image_id) VALUES (?,?)'''
        self.CREATE_TABLE_FLAVOR_ID_MAPPING = \
            '''CREATE TABLE flavor_id_mapping(cascading_flavor_id text, cascaded_flavor_id text)'''
        self.INSERT_TABLE_FLAVOR_ID_MAPPING = \
            '''INSERT INTO flavor_id_mapping(cascading_flavor_id, cascaded_flavor_id) VALUES (?,?)'''
        self.CREATE_TABLE_VOLUME_MAPPING = \
            '''CREATE TABLE volume_mapping(cascading_volume_id text, cascaded_volume_id text, cascaded_backup_id text, cascaded_image_id text)'''
        self.INSERT_VOLUME_MAPPING = \
            '''INSERT INTO volume_mapping(cascading_volume_id, cascaded_volume_id, cascaded_backup_id, cascaded_image_id) VALUES (?, ?, ?, ?)'''
        db_full_name = self.get_hws_gateway_db_full_name()
        # Create the schema only on first use.
        if not os.path.isfile(db_full_name):
            self.init_database()

    def get_current_dir(self):
        """Directory containing this module."""
        return os.path.split(os.path.realpath(__file__))[0]

    def get_hws_gateway_db_full_name(self):
        """Absolute path of the sqlite database file."""
        full_name = os.path.join(self.DB_STORE_PATH, self.HWS_GATEWAY_DB)
        return full_name

    def connect(self):
        """Return the shared connection, opening it if necessary."""
        if DatabaseManager.conn is None:
            DatabaseManager.conn = sqlite3.connect(self.get_hws_gateway_db_full_name())
        return DatabaseManager.conn

    def close(self):
        """Close and forget the shared connection (no-op when unopened)."""
        if DatabaseManager.conn:
            DatabaseManager.conn.close()
            DatabaseManager.conn = None

    def commit(self):
        if DatabaseManager.conn:
            DatabaseManager.conn.commit()

    def init_database(self):
        """Create every mapping table (each create is existence-checked)."""
        self.create_table_server_id_mapping()
        self.create_table_server_id_name_mapping()
        self.create_table_image_id_mapping()
        self.create_table_flavor_id_mapping()
        self.create_table_volume_mapping()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_server_id_mapping(self, cascading_server_id, cascaded_server_id):
        cursor = self.connect().cursor()
        exe_sql = self.INSERT_SERVER_ID_MAPPING
        cursor.execute(exe_sql, (cascading_server_id, cascaded_server_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_server_id_by_cascading_id(self, cascading_server_id):
        cursor = self.connect().cursor()
        exe_sql = "DELETE FROM server_id_mapping WHERE cascading_server_id = ?"
        data = [cascading_server_id]
        cursor.execute(exe_sql, data)
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_server_id(self, cascading_id):
        """Return the cascaded server id for a cascading id, or None.

        NOTE(review): ids are internal, but the SELECTs use string
        interpolation — consider parameterizing like the DELETEs.
        """
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_server_id FROM server_id_mapping "
                       "WHERE cascading_server_id = '%s'"
                       % cascading_id)
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_server_id_name_mapping(self, cascading_server_id, cascaded_server_name):
        cursor = self.connect().cursor()
        exe_sql = self.INSERT_SERVER_ID_NAME_MAPPING
        cursor.execute(exe_sql, (cascading_server_id, cascaded_server_name))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_server_name(self, cascading_server_id):
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_server_name FROM server_id_name_mapping "
                       "WHERE cascading_server_id = '%s'" % cascading_server_id)
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_server_id_name_by_cascading_id(self, cascading_server_id):
        cursor = self.connect().cursor()
        exe_sql = "DELETE FROM server_id_name_mapping WHERE cascading_server_id = ?"
        data = [cascading_server_id]
        cursor.execute(exe_sql, data)
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_image_id_mapping(self, cascading_image_id, cascaded_image_id):
        cursor = self.connect().cursor()
        exe_sql = self.INSERT_IMAGE_ID_MAPPING
        cursor.execute(exe_sql, (cascading_image_id, cascaded_image_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_image_id_mapping(self, cascading_image_id):
        cursor = self.connect().cursor()
        exe_sql = "DELETE FROM image_id_mapping WHERE cascading_image_id = ?"
        data = [cascading_image_id]
        cursor.execute(exe_sql, data)
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_image_id(self, cascading_image_id):
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_image_id FROM image_id_mapping "
                       "WHERE cascading_image_id = '%s'" % cascading_image_id)
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_flavor_id_mapping(self, cascading_flavor_id, cascaded_flavor_id):
        cursor = self.connect().cursor()
        exe_sql = self.INSERT_TABLE_FLAVOR_ID_MAPPING
        cursor.execute(exe_sql, (cascading_flavor_id, cascaded_flavor_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_flavor_id_mapping(self, cascading_flavor_id):
        cursor = self.connect().cursor()
        exe_sql = "DELETE FROM flavor_id_mapping WHERE cascading_flavor_id = ?"
        data = [cascading_flavor_id]
        cursor.execute(exe_sql, data)
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_flavor_id(self, cascading_flavor_id):
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_flavor_id FROM flavor_id_mapping "
                       "WHERE cascading_flavor_id = '%s'" % cascading_flavor_id)
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def add_volume_mapping(self, cascading_volume_id, cascaded_volume_id,
                           cascaded_backup_id=None, cascaded_image_id=None):
        cursor = self.connect().cursor()
        exe_sql = self.INSERT_VOLUME_MAPPING
        cursor.execute(exe_sql, (cascading_volume_id, cascaded_volume_id, cascaded_backup_id, cascaded_image_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_volume_id(self, cascading_volume_id):
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_volume_id FROM volume_mapping "
                       "WHERE cascading_volume_id = '%s'"
                       % cascading_volume_id)
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_backup_in_volume_mapping(self, cascading_volume_id, cascaded_backup_id):
        """Record the cascaded backup id on an existing volume mapping row."""
        cursor = self.connect().cursor()
        # BUG FIX: the original statement set cascading_volume_id to the
        # backup id and filtered on cascaded_volume_id with swapped values.
        cursor.execute("UPDATE volume_mapping SET cascaded_backup_id = ? "
                       "WHERE cascading_volume_id = ?",
                       (cascaded_backup_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_image_in_volume_mapping(self, cascading_volume_id, cascaded_image_id):
        """Record the cascaded image id on an existing volume mapping row."""
        cursor = self.connect().cursor()
        # BUG FIX: same column/filter mix-up as the backup variant above.
        cursor.execute("UPDATE volume_mapping SET cascaded_image_id = ? "
                       "WHERE cascading_volume_id = ?",
                       (cascaded_image_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def update_cascaded_volume_in_volume_mapping(self, cascading_volume_id, cascaded_volume_id):
        cursor = self.connect().cursor()
        cursor.execute("UPDATE volume_mapping SET cascaded_volume_id='%s' WHERE cascading_volume_id='%s'" %
                       (cascaded_volume_id, cascading_volume_id))
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_backup(self, cascading_volume_id):
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_backup_id FROM volume_mapping "
                       "WHERE cascading_volume_id = '%s'"
                       % cascading_volume_id)
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def get_cascaded_backup_image(self, cascading_volume_id):
        cursor = self.connect().cursor()
        cursor.execute("SELECT cascaded_image_id FROM volume_mapping "
                       "WHERE cascading_volume_id = '%s'"
                       % cascading_volume_id)
        row = cursor.fetchone()
        if row:
            return str(row[0])
        return None

    @retry(TIMES, INTERVAL)
    @close_connection()
    def delete_volume_mapping(self, cascading_volume_id):
        cursor = self.connect().cursor()
        exe_sql = "DELETE FROM volume_mapping WHERE cascading_volume_id = ?"
        data = [cascading_volume_id]
        cursor.execute(exe_sql, data)
        self.commit()

    @retry(TIMES, INTERVAL)
    @close_connection()
    def drop_table(self, table_name):
        cursor = self.connect().cursor()
        cursor.execute('drop table if exists %s' % table_name)
        self.commit()

    def create_tables(self, create_table_sql_list):
        """Execute each CREATE TABLE statement in the list, then commit."""
        cursor = self.connect().cursor()
        for create_table_sql in create_table_sql_list:
            cursor.execute(create_table_sql)
        self.commit()

    def drop_all_tables(self):
        self.drop_table_server_id_mapping()
        self.drop_table_server_id_name_mapping()
        self.drop_table_image_id_mapping()
        self.drop_table_flavor_id_mapping()
        self.drop_table_volume_mapping()

    def create_table(self, create_table_sql):
        self.create_tables([create_table_sql])

    def drop_table_server_id_mapping(self):
        self.drop_table('server_id_mapping')

    def drop_table_server_id_name_mapping(self):
        self.drop_table('server_id_name_mapping')

    def drop_table_image_id_mapping(self):
        self.drop_table('image_id_mapping')

    def drop_table_flavor_id_mapping(self):
        self.drop_table('flavor_id_mapping')

    def drop_table_volume_mapping(self):
        self.drop_table('volume_mapping')

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_image_id_mapping(self):
        if not self.is_table_exist('image_id_mapping'):
            self.create_table(self.CREATE_TABLE_IMAGE_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_server_id_mapping(self):
        if not self.is_table_exist('server_id_mapping'):
            self.create_table(self.CREATE_TABLE_SERVER_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_server_id_name_mapping(self):
        if not self.is_table_exist('server_id_name_mapping'):
            self.create_table(self.CREATE_TABLE_SERVER_ID_NAME_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_flavor_id_mapping(self):
        if not self.is_table_exist('flavor_id_mapping'):
            self.create_table(self.CREATE_TABLE_FLAVOR_ID_MAPPING)

    @retry(TIMES, INTERVAL)
    @close_connection()
    def create_table_volume_mapping(self):
        if not self.is_table_exist('volume_mapping'):
            self.create_table(self.CREATE_TABLE_VOLUME_MAPPING)

    def is_table_exist(self, table_name):
        """True when the named table is present in sqlite_master."""
        cursor = self.connect().cursor()
        sql = "SELECT name FROM sqlite_master WHERE type='table' AND name='%s'" % table_name
        cursor.execute(sql)
        row = cursor.fetchone()
        if row:
            return True
        return False
def print_option():
    """Print the command-line options this script supports."""
    usage_lines = (
        'Support options:',
        ' add_image_mapping [CASCADING_IMAGE_ID] [CASCADED_IMAGE_ID]',
        ' add_flavor_mapping [CASCADING_FLAVOR_ID] [CASCADED_FLAVOR_ID]',
        ' add_server_mapping [CASCADING_SERVER_ID] [CASCADED_SERVER_ID]',
        ' get_cascaded_image [CASCADING_IMAGE_ID]',
        ' get_cascaded_flavor [CASCADING_FLAVOR_ID]',
        ' get_cascaded_server [CASCADING_SERVER_ID]',
    )
    for usage_line in usage_lines:
        print(usage_line)
if __name__ == '__main__':
    # CLI entry point: dispatch on the first positional argument.
    database_manager = DatabaseManager()
    if len(sys.argv) <= 1:
        database_manager.close()
        print('please enter option.')
        print_option()
        exit(0)
    mode = sys.argv[1]
    if mode == 'init_db':
        # Create every mapping table (existence-checked, so idempotent).
        print('Start to create database for Database Manager >>>>>>')
        database_manager.init_database()
        print('End to create database for Database Manager >>>>>>')
    elif mode == 'drop_db':
        print('Start to drop database for Database Manager >>>>>>')
        database_manager.drop_all_tables()
        print('Finish to drop database for Database Manager >>>>>>')
    elif mode == 'add_image_mapping':
        # Usage: add_image_mapping CASCADING_IMAGE_ID CASCADED_IMAGE_ID
        if len(sys.argv) == 4:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = sys.argv[3]
            database_manager.add_image_id_mapping(cascading_image_id, cascaded_image_id)
    elif mode == 'add_flavor_mapping':
        # Usage: add_flavor_mapping CASCADING_FLAVOR_ID CASCADED_FLAVOR_ID
        if len(sys.argv) == 4:
            cascading_flavor_id = sys.argv[2]
            cascaded_flavor_id = sys.argv[3]
            database_manager.add_flavor_id_mapping(cascading_flavor_id, cascaded_flavor_id)
    elif mode == 'get_cascaded_image':
        if len(sys.argv) == 3:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = database_manager.get_cascaded_image_id(cascading_image_id)
            print('cascaded image id: %s' % cascaded_image_id)
    elif mode == 'get_cascaded_flavor':
        if len(sys.argv) == 3:
            cascading_flavor_id = sys.argv[2]
            cascaded_flavor_id = database_manager.get_cascaded_flavor_id(cascading_flavor_id)
            print('cascaded flavor id: %s' % cascaded_flavor_id)
    elif mode == 'get_cascaded_server':
        if len(sys.argv) == 3:
            cascading_server_id = sys.argv[2]
            cascaded_server_id = database_manager.get_cascaded_server_id(cascading_server_id)
            print('cascaded server id: %s' % cascaded_server_id)
    elif mode == 'add_server_mapping':
        # Usage: add_server_mapping CASCADING_SERVER_ID CASCADED_SERVER_ID
        if len(sys.argv) == 4:
            cascading_server_id = sys.argv[2]
            cascaded_server_id = sys.argv[3]
            database_manager.add_server_id_mapping(cascading_server_id, cascaded_server_id)
    elif mode == 'delete_image_mapping':
        if len(sys.argv) == 3:
            cascading_image_id = sys.argv[2]
            cascaded_image_id = database_manager.delete_image_id_mapping(cascading_image_id)
            print('delete image mapping of cascading image: %s' % cascading_image_id)
    elif mode == '--help':
        print_option()
    else:
        print('Not support option: %s' % mode)
        print_option()
database_manager.close() | {
"repo_name": "Hybrid-Cloud/orchard",
"path": "etc/hybrid_cloud/scripts/patches/patches_tool/hws_patch/database_manager.py",
"copies": "2",
"size": "17965",
"license": "apache-2.0",
"hash": 4760243431761704000,
"line_mean": 36.391025641,
"line_max": 145,
"alpha_frac": 0.5878652936,
"autogenerated": false,
"ratio": 3.598037252153014,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5185902545753014,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Administrator'
def __intval(x, default=0):
    """Coerce x to int; return `default` when x is empty or not purely digits."""
    text = str(x) if x else ''
    return int(text) if text.isdigit() else default
def __decorator_boolval(fn):
    """Decorator: map the strings 'true'/'false' (any case) to Python booleans;
    any other result is passed through unchanged."""
    def wrapper(*args, **kwargs):
        result = fn(*args, **kwargs)
        if isinstance(result, str):
            lowered = result.lower()
            if lowered == 'true':
                return True
            if lowered == 'false':
                return False
        return result
    return wrapper
def __decorator_smart_text(fn):
    """Decorator: run the wrapped result through Django's smart_text (strings only)."""
    def wrapper(*args, **kwargs):
        from django.utils import encoding
        value = fn(*args, **kwargs)
        return encoding.smart_text(value, strings_only=True)
    return wrapper
@__decorator_boolval
@__decorator_smart_text
def __get_param(request, param_name):
    """Return the named parameter's value (GET first, then POST); None when absent."""
    fallback = request.POST.get(param_name, None)
    return request.GET.get(param_name, fallback)
def __get_param_list(request, param_name):
    """Return the named parameter's values as a list (GET first, then POST); [] when absent."""
    fallback = request.POST.getlist(param_name, [])
    return request.GET.getlist(param_name, fallback)
def start_limit(request, limit_default=0):
    """Read 'start'/'limit' from the request (POST first).

    Return: (start, start + limit) as ints, or the raw (possibly None)
    values when either parameter is missing.
    """
    start = request.POST.get('start', request.GET.get('start', None))
    limit = request.POST.get('limit', request.GET.get('limit', None))
    if start is None or limit is None:
        return start, limit
    begin = __intval(start, default=0)
    span = __intval(limit, default=limit_default)
    return begin, begin + span
def sort_info(request):
    """Return sorting parameters as (dir, sort, Django order-by field),
    or None when either 'sort' or 'dir' is missing (POST first)."""
    sort = request.POST.get('sort', request.GET.get('sort', None))
    direction = request.POST.get('dir', request.GET.get('dir', None))
    if sort is None or direction is None:
        return None
    prefix = '-' if direction.upper() == 'DESC' else ''
    return direction, sort, prefix + sort
def callback(msg):
    """Success responseText payload. Returns: dict"""
    return dict(success=True, msg=msg)
def errorcallback(msg='ERROR'):
    """Error responseText payload. Returns: dict"""
    payload = {'success': False}
    payload['msg'] = msg
    return payload
def curpage(total, data_itmes):
    """Data for a paginated store: {total: XXX, items: XXX}. Returns: dict"""
    return dict(total=total, items=data_itmes)
def items(value):
    """Data for a store that does not paginate; value is materialized into a list."""
    return dict(items=list(value))
def getparam(request, *params, list_params=()):
    """Fetch the values of ``*params`` (GET first, POST fallback).

    Returns a dict {name: value} for multiple names, or the bare value
    for a single name (a list when the name appears in ``list_params``).

    Fix: ``list_params`` now defaults to an immutable empty tuple instead
    of the mutable ``[]`` (shared-default pitfall); only membership tests
    are performed on it, so callers are unaffected.
    """
    if len(params) == 1:
        name = params[0]
        if name in list_params:
            return __get_param_list(request, name)
        return __get_param(request, name)
    return {
        name: __get_param_list(request, name) if name in list_params
        else __get_param(request, name)
        for name in params
    }
"repo_name": "HarrisonHDU/myerp",
"path": "apps/myerp/tools/helper.py",
"copies": "1",
"size": "3167",
"license": "mit",
"hash": -1200822336178941400,
"line_mean": 29.5376344086,
"line_max": 107,
"alpha_frac": 0.5991129307,
"autogenerated": false,
"ratio": 3.0917721518987342,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4190885082598734,
"avg_score": null,
"num_lines": null
} |
__author__ = 'admin'
# NOTE: Python 2 script (uses print statements).  Cross-references OMA
# orthologue groups against a proteome header index and writes an
# annotated spreadsheet for the Desulfitobacterium strains below.
#cepas=['2Ddes', '3PCE1s', '2Dmes', '2DP7s', '2Dres', '2LMGs', '2PCESs', '2PCP1s' , '2TCPAs', '2Y51s', '3LBEs', 'DCB2s', 'DhaTCE1_proteomes.txt' ]
names=['Desulfitobacterium dehalogenans ATCC 51507', 'Desulfitobacterium sp. PCE1','Desulfitobacterium metallireducens DSM 15288', 'Desulfitobacterium hafniense DP-7', 'Dehalobacter restrictus PER-K23', 'Desulfitobacterium dichloroeliminans LMG P-21439', 'Desulfitobacterium hafniense PCE-S', 'Desulfitobacterium hafniense PCP-1', 'Desulfitobacterium hafniense TCP-A', 'Desulfitobacterium hafniense Y51', 'Desulfitobacterium sp. LBE', 'Desulfitobacterium hafniense DCB-2', 'Desulfitobacterium hafniense TCE1']
# Input: one header line per gene across all strains.
index= open('index_desulfito.txt', 'r').readlines()
comunes = open('Core.txt','w')
orto = open('OrthologousGroups.txt','r').readlines()
single = open('Singletons.txt','w')
#fasta = open('Fasta.fasta','w')
over = open('Overview.txt','w')
inparoutput = open('Inparalogs.txt','w')
pairsoutput = open('Pairs.txt','w')
# Main output: tab-separated id/description pairs, one orthologue group per row.
ortho= open('Orthologous_Desulfito.xls', 'w')
pairwise=[]
####### remove the leading '#' markers below once the 'orto' step is done
#for c in range(0, len(cepas)-1):
#    for x in range(1, 12):
#        pairwise.append(str(cepas[c])+'-'+str(cepas[x]+'.txt'))
#for i in range(0,len(pairwise)):
#    try:
#info=[]
#for g in range(0, len(cepas)):
#    proteome=open(cepas[g]+'.fa', 'r').read()
#    proteome.split('>')
#    for p in proteome:
#        info.append(p[:p.index('\n')])
#print info[0:15]
### OrthologousGroups
lineas = []
fila = []
lineabuena=[]
todosorto=[]
# Skip the 4 header lines of OrthologousGroups.txt.
orto=orto[4:]
listaidusados=[]
#for i in index: # for each gene in the full index of all genes of all bacteria
for o in orto: # for each orthologue line, extract just the id
    writelinea=[]
    fila=o.split('\t')
    fila=fila[1:]
    for j in fila:
        test=0
        if ':' in j:
            colon=j.index(':')
            # Some entries contain two ':' separators; take the id between them.
            if j.count(':') > 1:
                colon2=j[colon+3:].index(':')+1
                colon=colon+colon2+2
            id=j[j.index(':')+1:colon]
            for i in index: # for each header line
                if id in i: # for each hit in the row
                    test=i.count(id)
                    listaidusados.append(id)
                    ortho.write(str(id+'\t'+i[i.find(' ')+1:-1]+'\t'))
            # An id matching more than once in the same header is suspicious.
            if test>1:
                print id
    #for w in writelinea
    #ortho.write(str(writelinea))
    ortho.write('\n')
print 'fini'
"repo_name": "elecabfer/OMA_github",
"path": "oma_desulfito.py",
"copies": "1",
"size": "2464",
"license": "mit",
"hash": 5721805152057544000,
"line_mean": 40.7796610169,
"line_max": 511,
"alpha_frac": 0.6059253247,
"autogenerated": false,
"ratio": 2.6240681576144835,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.37299934823144837,
"avg_score": null,
"num_lines": null
} |
from django import forms
from MyRangoApp.models import Page, Category
class CategoryForm(forms.ModelForm):
    """Form for creating a Category; views/likes are hidden and start at 0."""
    name = forms.CharField(max_length=128, help_text="Please enter the category name.")
    views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
    likes = forms.IntegerField(widget=forms.HiddenInput(), initial=0)

    # An inline class to provide additional information on the form.
    class Meta:
        # Provide an association between the ModelForm and a model
        # NOTE(review): no ``fields``/``exclude`` declared — ModelForms
        # require one on Django >= 1.8; confirm the project's Django version.
        model = Category
class PageForm(forms.ModelForm):
    """Form for creating a Page; ``views`` is hidden and starts at 0."""
    title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
    url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
    views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)

    class Meta:
        # Provide an association between the ModelForm and a model
        model = Page
        # Only expose these fields: hides the Category foreign key and
        # lets NULL-able columns stay out of the form.
        fields = ('title', 'url', 'views')

    def clean(self):
        """Normalise the URL: prepend 'http://' when no scheme is present.

        Bug fix: the original called the non-existent ``str.startwith``
        (an AttributeError at runtime) and compared against the literal
        prefix 'url' instead of a scheme.
        """
        cleaned_data = self.cleaned_data
        url = cleaned_data.get('url')
        # If url is non-empty and lacks a scheme, prepend 'http://'.
        if url and not url.startswith(('http://', 'https://')):
            cleaned_data['url'] = 'http://' + url
        return cleaned_data
| {
"repo_name": "octobertech/MyRangoApp",
"path": "MyRangoApp/forms.py",
"copies": "1",
"size": "1555",
"license": "apache-2.0",
"hash": 6230676910855355000,
"line_mean": 37.875,
"line_max": 92,
"alpha_frac": 0.6578778135,
"autogenerated": false,
"ratio": 4.157754010695188,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.02931757500893605,
"num_lines": 40
} |
__author__ = 'admin'
import os
from subprocess import call,Popen
import subprocess
from multiprocessing import Process
import multiprocessing
import re
import traceback
try:
import urllib.request as urllib2
except ImportError:
import urllib2
import logging
from glob import glob
import json
from webmocker.mock_helper import pretender_defaults,MockRequest,MockResponse,HttpMock
# Module-level logger for the webmocker package.
LOGGER = logging.getLogger("webmocker")
# Stub filters this mocker cannot honour directly.
not_supported_filters = ["doesNotMatch"]
# Popen handle of the pretend server (set by pretend()).
pid = None
# multiprocessing.Process wrapping the server (set by start_pretend()).
server_process = None
def start_pretend(port_number = pretender_defaults.portno):
    """Launch the pretend server on ``port_number`` in a child process."""
    global server_process
    server_process = Process(name='pretend', target=pretend,
                             args=(port_number,))
    server_process.start()
def pretend(port_number):
    """Run the pretend_extended server; its Popen handle is kept in ``pid``.

    Security fix: the command is now an argument list with the default
    ``shell=False`` instead of a shell-interpolated string, so
    ``port_number`` can never be interpreted by a shell.
    """
    global pid
    pid = Popen(["python", "-m", "pretend_extended.server.server",
                 "--host", "0.0.0.0", "--port", str(port_number)],
                stdout=subprocess.PIPE)
def stop_pretend():
    """Terminate the pretend server process, if one was started."""
    if server_process is not None:
        server_process.terminate()
def get_url_from_json(request_json):
    """Build the URL-matching regex for a stub request, folding any
    ``queryParameters`` into the pattern."""
    url = get_url(request_json)
    # handle query params
    if 'queryParameters' not in request_json:
        return url
    params = request_json['queryParameters']
    pattern = '(' + format_query_string(params) + '&?){' + str(len(params)) + '}'
    if pattern != '':
        url = url + r'\?'
    return url + pattern
def get_body_from_json(request_json):
    """Build the body-matching expression for a stub request.

    When ``bodyPatterns`` is present, concatenates the ``matches`` pattern
    and a 'PPP'-prefixed ``doesNotMatch`` pattern; otherwise returns the
    plain ``body`` (or the package default when absent).
    """
    if 'bodyPatterns' not in request_json:
        return request_json.get('body', pretender_defaults.request_body)
    patterns = convert_list_to_dict(request_json['bodyPatterns'])
    parts = []
    if 'matches' in patterns:
        parts.append(patterns['matches'])
    if 'doesNotMatch' in patterns:
        parts.append('PPP' + patterns['doesNotMatch'])
    return ''.join(parts)
def get_headers_from_json(request_json):
    """Return the request's header-matching dict ({} when absent)."""
    if 'headers' in request_json:
        return convert_json_to_dict(request_json['headers'])
    return {}
def convert_json_to_dict(json_element):
    """Map each header name to its compiled matching expression."""
    # delete_keys(json_element,not_supported_filters)
    converted = {}
    for header, value in json_element.items():
        converted[header] = get_header_value(value)
    return converted
def delete_keys(json_element, keys_to_delete):
    """Remove entries whose dict value contains any key in ``keys_to_delete``
    (mutates ``json_element`` in place)."""
    doomed = [
        header
        for header, value in json_element.items()
        if isinstance(value, dict) and key_in_list(value, keys_to_delete)
    ]
    for header in doomed:
        del json_element[header]
def convert_list_to_dict(dict_element):
    """Merge a list of small dicts into a single dict (later keys win)."""
    merged = {}
    for entry in dict_element:
        merged.update(entry)
    return merged
def key_in_list(value, keys_to_delete):
    """Return True when ``value`` contains at least one key from
    ``keys_to_delete``.

    Idiom fix: the manual boolean-accumulation loop is replaced by the
    short-circuiting ``any()`` (the original always scanned every key).
    """
    return any(key in value for key in keys_to_delete)
def get_header_value(value):
    """Translate one header spec into a regex fragment.

    Dict specs support equalTo / matches / contains / doesNotMatch (first
    hit wins); any other value is returned untouched.
    """
    if not isinstance(value, dict):
        return value
    if 'equalTo' in value:
        return re.escape(value['equalTo'])
    if 'matches' in value:
        return '.*?%s.*' % value['matches']
    if 'contains' in value:
        return '.*?%s.*' % value['contains']
    if 'doesNotMatch' in value:
        return 'PPP.*?%s.*' % value['doesNotMatch']
    return value
def format_query_string(query_params):
    """Join per-parameter regex fragments with the '&?|' alternation."""
    fragments = [get_param_value(param, value)
                 for param, value in query_params.items()]
    return '&?|'.join(fragments)
def get_param_value(param, value):
    """Translate one query-parameter spec into a regex fragment.

    Dict specs support contains / equalto / matches (first hit wins; a dict
    with none of these yields None, as before).  Plain values are matched
    literally with spaces URL-encoded.
    """
    if isinstance(value, dict):
        if 'contains' in value:
            quoted = re.escape(urllib2.quote(value['contains'])).replace('\%', '%')
            return param + '=.*?' + quoted + '.*?'
        if 'equalto' in value:
            quoted = re.escape(urllib2.quote(value['equalto'])).replace('\%', '%')
            return param + '=' + quoted
        if 'matches' in value:
            return param + '=' + value['matches'].replace(' ', '%20')
    else:
        return param + '=' + value.replace(' ', '%20')
def get_response_headers_from_json(response_json):
    """Return a copy of the stub response's headers ({} when absent).

    Idiom fix: the manual key-by-key copy loop is replaced by the
    equivalent ``dict()`` constructor.
    """
    if 'headers' not in response_json:
        return {}
    return dict(response_json['headers'])
def process_stubs(stubs):
    """Register every stub from a parsed JSON list with the HTTP mock.

    A malformed stub is reported via traceback and skipped so the
    remaining stubs still get registered.
    """
    mock = HttpMock.Mock(pretender_defaults.portno, pretender_defaults.stub_name)
    for stub in stubs:
        try:
            req = MockRequest.Request()
            resp = MockResponse.Response()
            if 'request' in stub:
                req.set_request_entities(stub['request'])
            if 'response' in stub:
                resp.set_response_entities(stub['response'])
            mock.mock_request(req, resp)
        except:
            traceback.print_exc()
def process_stub_files(stub_files_path):
    """Load every ``*.json`` stub file under ``stub_files_path`` and
    register its stubs.

    Fixes: the file handle is now closed via ``with`` (the original's
    ``json.load(open(...))`` leaked it), the duplicated debug line is
    removed, and the bare ``except:`` is narrowed to ``Exception`` and
    logs the actual error with a traceback.
    """
    for stub_file in glob(stub_files_path + '*.json'):
        try:
            LOGGER.debug(stub_file)
            with open(stub_file) as json_fp:
                stubs = json.load(json_fp)
            process_stubs(stubs)
        except Exception:
            LOGGER.exception('Exception while Processing Json file')
def get_url(request_json):
    """Pick the URL regex for a stub: urlPath > urlPattern > url > default.

    '?' characters are escaped; 'url' is anchored with '$' while 'urlPath'
    also matches any sub-path.
    """
    if 'urlPath' in request_json:
        return request_json['urlPath'].replace('?', r'\?') + '(/.*)?'
    if 'urlPattern' in request_json:
        return request_json['urlPattern'].replace('?', r'\?')
    if 'url' in request_json:
        return request_json['url'].replace('?', r'\?') + '$'
    return pretender_defaults.url
def get_response_body_from_json(response_body):
    """Return the stub response body.

    When ``response_body`` names an existing file, its contents are
    returned; otherwise the value itself is the body.

    Fix: the file handle is now closed via ``with`` (the original leaked
    it).
    """
    if not os.path.exists(response_body):
        return response_body
    with open(response_body, 'r') as body_file:
        return body_file.read()
| {
"repo_name": "sheltonpaul89/web-mocker3",
"path": "webmocker/mock_helper/pretend_helpers.py",
"copies": "1",
"size": "5754",
"license": "bsd-2-clause",
"hash": -7198610692691711000,
"line_mean": 36.6078431373,
"line_max": 160,
"alpha_frac": 0.6472019465,
"autogenerated": false,
"ratio": 3.6279949558638083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4775196902363808,
"avg_score": null,
"num_lines": null
} |
__author__ = 'admin'
import os
from subprocess import call,Popen
import subprocess
from multiprocessing import Process
import multiprocessing
import re
import urllib2
import logging
import HttpMock
import MockRequest,MockResponse
from glob import glob
import json
from webmocker.mock_helper import pretender_defaults
# Module-level logger (legacy Python 2 twin of the web-mocker3 module).
LOGGER = logging.getLogger("webmocker")
# Stub filters this mocker cannot honour directly.
not_supported_filters = ["doesNotMatch"]
# Popen handle of the pretend server (set by pretend()).
pid = None
# multiprocessing.Process wrapping the server (set by start_pretend()).
server_process = None
def start_pretend(port_number = pretender_defaults.portno):
    """Launch the pretend server on ``port_number`` in a child process."""
    global server_process
    server_process = Process(name='pretend', args=(port_number,),target=pretend)
    server_process.start()
def pretend(port_number):
    """Run the pretend_extended server; the Popen handle is kept in ``pid``.

    NOTE(review): ``shell=True`` with string interpolation — fine for the
    integer port used here, but unsafe if the argument ever came from
    untrusted input.
    """
    global pid
    pid = Popen("python -m pretend_extended.server.server --host 0.0.0.0 --port "+ str(port_number), stdout=subprocess.PIPE, shell=True)
def stop_pretend():
    """Terminate the pretend server process, if one was started."""
    if(server_process != None):
        server_process.terminate()
def get_url_from_json(request_json):
    """Build the URL-matching regex for a stub request, folding any
    ``queryParameters`` into the pattern (Python 2: uses dict.has_key)."""
    url = get_url(request_json)
    # handle query params
    if(request_json.has_key('queryParameters') == False): return url
    query_param = format_query_string(request_json['queryParameters'])
    query_param = '('+ query_param + '&?){'+ str(len(request_json['queryParameters'])) +'}'
    url = url + ('\?' if query_param!='' else '') + query_param
    return url
def get_body_from_json(request_json):
    """Build the body-matching expression: ``matches`` plus a
    'PPP'-prefixed ``doesNotMatch`` when ``bodyPatterns`` is present,
    otherwise the plain ``body`` (or the package default)."""
    body = request_json['body'] if request_json.has_key('body') else pretender_defaults.request_body
    if(request_json.has_key('bodyPatterns') == False):
        return body
    body = convert_list_to_dict(request_json['bodyPatterns'])
    body_str = ''
    if body.has_key('matches'):
        body_str = body_str + body['matches']
    if body.has_key('doesNotMatch'):
        body_str = body_str + 'PPP'+ body['doesNotMatch']
    return body_str
def get_headers_from_json(request_json):
    """Return the request's header-matching dict ({} when absent)."""
    if(request_json.has_key('headers') == False):
        return {}
    headers = convert_json_to_dict(request_json['headers'])
    return headers
def convert_json_to_dict(json_element):
    """Map each header name to its compiled matching expression."""
    # delete_keys(json_element,not_supported_filters)
    return { header : get_header_value(value) for header,value in json_element.items()}
def delete_keys(json_element,keys_to_delete):
    """Remove entries whose dict value contains any of ``keys_to_delete``
    (mutates ``json_element`` in place)."""
    remove = [header for header,value in json_element.items() if isinstance(value, dict) and key_in_list(value,keys_to_delete)]
    for k in remove: del json_element[k]
def convert_list_to_dict(dict_element):
    """Merge a list of small dicts into one dict (later keys win)."""
    # return [key_value_pair for key_value_pair in dict_element if isinstance(key_value_pair, dict) and key_in_list(key_value_pair,["matches","doesNotMatch"])]
    return dict([(key,d[key]) for d in dict_element for key in d])
def key_in_list(value,keys_to_delete):
    """Return True when ``value`` contains at least one of the keys."""
    result = False
    for key in keys_to_delete:
        result = result or value.has_key(key)
    return result
def get_header_value(value):
    """Translate one header spec (equalTo/matches/contains/doesNotMatch)
    into a regex fragment; non-dict values pass through unchanged."""
    if isinstance(value, dict):
        if(value.has_key('equalTo')): return re.escape(value['equalTo'])
        elif(value.has_key('matches')): return '.*?'+ value['matches'] +'.*'
        elif(value.has_key('contains')): return '.*?'+value['contains']+'.*'
        elif(value.has_key('doesNotMatch')): return 'PPP.*?'+value['doesNotMatch'] +'.*'
    return value
def format_query_string(query_params):
    """Join per-parameter regex fragments with the '&?|' alternation."""
    query_param = ''
    for param,value in query_params.items():
        query_param = query_param + ('&?|' if query_param!='' else '') + get_param_value(param,value)
    return query_param
def get_param_value(param,value):
    """Translate one query-parameter spec (contains/equalto/matches or a
    plain literal) into a regex fragment; spaces are URL-encoded."""
    if isinstance(value, dict):
        if(value.has_key('contains')):
            return param +'=.*?'+ re.escape(urllib2.quote(value['contains'])).replace('\%','%')+'.*?'
        elif(value.has_key('equalto')):
            return param +'='+ re.escape(urllib2.quote(value['equalto'])).replace('\%','%')
        elif(value.has_key('matches')):
            return param +'='+ value['matches'].replace(' ','%20')
    else:
        return param +'='+ value.replace(' ','%20')
def get_response_headers_from_json(response_json):
    """Return a copy of the stub response's headers ({} when absent)."""
    response_headers = {}
    if(response_json.has_key('headers') == False):
        return response_headers
    for header,value in response_json['headers'].items():
        response_headers[header] = value
    return response_headers
def process_stubs(stubs):
    """Register every stub from a parsed JSON list with the HTTP mock.

    NOTE(review): unlike the py3 twin, a malformed stub here is not
    caught — one bad stub aborts the whole batch.
    """
    mock = HttpMock.Mock(pretender_defaults.portno,pretender_defaults.stub_name)      # create a HTTPMock Object
    for stub in stubs:                                                                # iterate for each stub in the json
        request = MockRequest.Request()
        response = MockResponse.Response()
        if (stub.has_key('request')):
            request.set_request_entities(stub['request'])
        if (stub.has_key('response')):
            response.set_response_entities(stub['response'])
        mock.mock_request(request,response)
def process_stub_files(stub_files_path):
    """Load every ``*.json`` stub file under ``stub_files_path`` and
    register its stubs; a failing file is logged and skipped.

    NOTE(review): ``json.load(open(...))`` leaks the file handle and the
    bare ``except`` hides the error detail (fixed in the py3 twin).
    """
    for stub_file in glob(stub_files_path+'*.json'):                          # iterate for each json file
        try:
            stubs = json.load(open(stub_file))
            LOGGER.debug(stub_file)
            process_stubs(stubs)
        except:
            LOGGER.debug('Exception while Processing Json file')
def get_url(request_json):
    """Pick the URL regex for a stub: urlPath > urlPattern > url > default.
    '?' is escaped; 'url' is '$'-anchored, 'urlPath' matches sub-paths."""
    url = request_json['url'].replace('?','\?')+'$' if request_json.has_key('url') else pretender_defaults.url
    url = request_json['urlPattern'].replace('?','\?') if request_json.has_key('urlPattern') else url
    return request_json['urlPath'].replace('?','\?')+'(/.*)?' if request_json.has_key('urlPath') else url
def get_response_body_from_json(response_body):
    """Return the stub response body: the contents of the file at
    ``response_body`` when it exists, otherwise the value itself.

    NOTE(review): the file handle is never closed (fixed in the py3 twin).
    """
    if(os.path.exists(response_body)):
        file = open(response_body, 'r')
        file_content = file.read()
        return file_content
    else:
        return response_body
| {
"repo_name": "sheltonpaul89/web-mocker",
"path": "webmocker/mock_helper/pretend_helpers.py",
"copies": "1",
"size": "5696",
"license": "bsd-2-clause",
"hash": 6915841093868510000,
"line_mean": 37.7482993197,
"line_max": 160,
"alpha_frac": 0.6523876404,
"autogenerated": false,
"ratio": 3.5291201982651796,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.468150783866518,
"avg_score": null,
"num_lines": null
} |
import os
def populate():
    """Seed the database with sample Category and Page rows.

    Python 2 script; relies on ``Category``/``Page`` being imported into
    module globals by the ``__main__`` block before this is called.
    """
    python_cat = add_cat(name='Python',
                         views=128,
                         likes=64)

    add_page(cat=python_cat,
             title="Official Python Tutorial",
             url="http://docs.python.org/2/tutorial/")

    add_page(cat=python_cat,
             title="How to Think like a Computer Scientist",
             url="http://www.greenteapress.com/thinkpython/")

    add_page(cat=python_cat,
             title="Learn Python in 10 Minutes",
             url="http://www.korokithakis.net/tutorials/python/")

    django_cat = add_cat(name="Django",
                         views=64,
                         likes=32)

    add_page(cat=django_cat,
             title="Official Django Tutorial",
             url="https://docs.djangoproject.com/en/1.5/intro/tutorial01/")

    add_page(cat=django_cat,
             title="Django Rocks",
             url="http://www.djangorocks.com/")

    add_page(cat=django_cat,
             title="How to Tango with Django",
             url="http://www.tangowithdjango.com/")

    frame_cat = add_cat(name="Other Frameworks",
                        views=32,
                        likes=16)

    add_page(cat=frame_cat,
             title="Bottle",
             url="http://bottlepy.org/docs/dev/")

    add_page(cat=frame_cat,
             title="Flask",
             url="http://flask.pocoo.org")

    # Print out what we have added to the user.
    for c in Category.objects.all():
        for p in Page.objects.filter(category=c):
            print "- {0} - {1}".format(str(c), str(p))
def add_page(cat, title, url, views=0):
    """Get or create a Page in ``cat``; returns the Page instance."""
    page, _created = Page.objects.get_or_create(category=cat, title=title, url=url, views=views)
    return page
def add_cat(name, views, likes):
    """Get or create a Category by name; returns the Category instance."""
    category, _created = Category.objects.get_or_create(name=name, views=views, likes=likes)
    return category
# Start execution here!
if __name__ == '__main__':
    print "Starting MyRango population script..."
    # Settings must be configured before any Django model import.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'MyRango.settings')
    from MyRangoApp.models import Category, Page
    populate()
"repo_name": "octobertech/MyRangoApp",
"path": "populate_myrangoapp.py",
"copies": "1",
"size": "2021",
"license": "apache-2.0",
"hash": 8210041294670596000,
"line_mean": 29.1791044776,
"line_max": 86,
"alpha_frac": 0.5888174171,
"autogenerated": false,
"ratio": 3.4196277495769882,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4508445166676988,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Admin'
import utils as john
# list of holes in Complete_Geophysics.csv
# ['DD0509' 'DD0541' 'DD0542' 'DD0544' 'DD0551' 'DD0473' 'DD0409' 'DD0415'
# 'DD0980A' 'DD0989' 'DD1000' 'DD0991' 'DD1006' 'DD1005' 'DD1010' 'DD0992'
# 'DD1012' 'DD1013' 'DD1014' 'DD1070' 'DD1073' 'DD1077' 'DD1080' 'DD1081'
# 'DD1083' 'DD1082' 'DD1083A' 'DD1086A' 'DD1091' 'DD1095' 'DD1097' 'DD1098'
# 'DD1099' 'DD1100' 'DD1097A' 'DD1101' 'DD1102' 'DD1103' 'DD1105' 'DD1104'
# 'DD1106' 'DD1107' 'DD1104A' 'DD1108' 'DD1110' 'DD1111' 'DD1112' 'DD1113'
# '\x1a']
def n_holes(df):
    """Count the distinct HOLEID values in ``df``."""
    return len(df['HOLEID'].unique())
def extract_holes(HOLEID):
    """Load the geophysics columns for one drill hole, caching to CSV.

    Returns the DEPTH/DENB/DENL/GRDE/LSDU columns for ``HOLEID``, sorted
    by depth; the result is cached in '<HOLEID>.csv'.

    Fix: ``.sort('DEPTH')`` was removed from pandas (0.20+); replaced by
    the equivalent ``sort_values('DEPTH')``.
    """
    import pandas as pd
    import os
    cache = '%s.csv' % HOLEID
    if os.path.isfile(cache):
        return pd.read_csv(cache)
    geo = pd.read_csv('Complete_Geophysics.csv')
    hole = geo.query('HOLEID == "%s"' % HOLEID)
    subset = hole[['DEPTH', 'DENB', 'DENL', 'GRDE', 'LSDU']].sort_values('DEPTH')
    subset.to_csv(cache, index=False)
    return subset
def extract_peak_loc(hole, holeID):
    """Scan a hole's log for coal seams via the LSDU response.

    Slides a window down the DEPTH column; where the mean LSDU exceeds the
    threshold a seam is open, and the [start, end] depth pair is recorded
    when the response drops again.  Writes a '<holeID>.txt' summary and a
    '<holeID>.json' with the same figures, flags the rows in-place in
    ``hole['Flag']``, and returns the seam list.
    """
    response_th = 1000
    # window_size = 1# meters
    window_size = 4# meters
    peak_flag = [0]*len(hole['DEPTH'])
    seam_list = [] # list of holes
    nRows = len(hole)
    coal_seam_bound_start = False
    for i,depth in enumerate(hole['DEPTH']):
        if i%200 == 0:
            print( '%s progress: %i/%i'%(holeID, i, nRows))
        # if depth > 80: # start looking at 80 meters
        if depth > 90: # start looking at 80 meters
            # get the indexes within the scan window, this is very slow, maybe faster query?
            window_idx = hole[(hole['DEPTH'] >= (depth - window_size/2.0)) & ((hole['DEPTH'] <= (depth + window_size/2.0)))].index.tolist()
            bottom =depth - window_size/2.0
            top = depth + window_size/2.0
            # atv = hole.query('DEPTH > @bottom and DEPTH <= @top')['LSDU'].mean()
            # print hole['LSDU'][window_idx].mean()
            if hole['LSDU'][window_idx].mean() > response_th:
            # if hole.query('DEPTH > @bottom and DEPTH <= @top')['LSDU'].mean() > response_th:
                peak_flag[i] = 10000
                if coal_seam_bound_start == False:
                    seam_prop = [depth]
                    coal_seam_bound_start = True
                    # print 'ich bin hier'
            elif coal_seam_bound_start == True:
                # print 'ich bin wieder hier'
                seam_prop.append(depth) # add the end depth
                seam_list.append(seam_prop) # add hole [start end] to hole list
                seam_prop = [] # reset hole [start end]
                coal_seam_bound_start = False
    # if hole['LSDU'][i] > response_th:
    # peak_flag[i] = 10000
    hole['Flag'] = peak_flag
    # ``depth`` is the last depth seen; a seam still open at end-of-log is
    # never closed — NOTE(review): confirm that is intended.
    total_depth = depth
    coal_depth = 0
    for coal_seam in seam_list:
        coal_depth += (coal_seam[1] - coal_seam[0])
    coal_percentage = coal_depth/total_depth
    # write to txt
    f = open('%s.txt'%holeID, 'w')
    f.write('Coal Percentage: %s\n'%coal_percentage)
    f.write('Coal Depth: %s\n'%coal_depth)
    f.write('Total Depth: %s\n'%total_depth)
    f.write('Seam Structure: %s'%seam_list)
    f.close()
    # write to json
    out_dict = {}
    out_dict['Coal Percentage'] = coal_percentage
    out_dict['Coal Depth'] = coal_depth
    out_dict['Total Depth'] = total_depth
    out_dict['Seam Structure'] = seam_list
    import json
    with open('%s.json'%holeID,'w') as fp:
        json.dump(out_dict, fp)
    return seam_list
def extract_seams(bore_id, seam_list=[]):
    """Fetch cleaned, binned data for ``bore_id`` over the 100-400 m window
    and cache it to '<bore_id>_cleandata.csv'.

    ``seam_list`` is accepted for backward compatibility but unused.
    Returns the DataFrame, or None when ``john.get_data`` raises.

    Bug fix: ``DataFrame.to_csv`` has no ``ignore_index`` argument (the
    original call raised TypeError); the intent was ``index=False``.
    """
    print('Extracting {}'.format(bore_id))
    top = 100
    bottom = 400
    window_size = bottom - top
    mid = (top + bottom) / 2.0
    bin_size = 0.1
    try:
        df_data = john.get_data(boreid=bore_id, centre_point=mid,
                                window_size=window_size, bin_width=bin_size)
    except Exception as e:
        print('Exception raised! {}'.format(e))
        return
    df_data.to_csv('%s_cleandata.csv' % bore_id, index=False)
    return df_data
# ['ADEN', 'GRDE', 'DENB', 'LSDU', 'acoustic']
# hole data exist in both geophysics and acoustic scanner
# ['DD0541' 'DD0542' 'DD0551'
# 'DD0980A' 'DD0989' 'DD1000' 'DD0991' 'DD1006' 'DD1005' 'DD1010' 'DD0992'
# 'DD1012' 'DD1013' 'DD1014' 'DD1070' 'DD1073' 'DD1077' 'DD1080' 'DD1081'
# 'DD1083' 'DD1082' 'DD1083A' 'DD1086A' 'DD1091' 'DD1095' 'DD1097' 'DD1098'
# 'DD1099' 'DD1100' 'DD1097A' 'DD1101' 'DD1102' 'DD1103' 'DD1105' 'DD1104'
# 'DD1106' 'DD1107' 'DD1104A' 'DD1108' 'DD1110' 'DD1111' 'DD1112' 'DD1113'
# '\x1a']
if __name__ == '__main__':
    import pandas as pd
    import numpy as np
    import matplotlib.pyplot as plt
    # Holes present in both the geophysics data and the acoustic scanner.
    holeId = [
        'DD1097',
        'DD1098',
        'DD1099',
        'DD1100',
        'DD1101',
        'DD1102',
        'DD1103', 'DD1104', 'DD1105', 'DD1106',
        'DD1107', 'DD1108',
        'DD0541',
        'DD0542',
        'DD0551',
        'DD0980A',
        'DD0989',
        'DD0991',
        'DD0992',
        'DD1000',
        'DD1005',
        'DD1006',
        'DD1010',
        'DD1012',
        'DD1013',
        'DD1014']
    # extract_seams(bore_id = holeID, seam_list = hole_boundaries)
    # NOTE(review): extract_seams returns None on failure, which pd.concat
    # does not accept — a single failing hole aborts the run; confirm.
    result = pd.concat([extract_seams(bore_id=h) for h in holeId], ignore_index=True)
    result.to_csv('all_data.csv', index=False)
| {
"repo_name": "johnny555/2d3g",
"path": "main.py",
"copies": "1",
"size": "5336",
"license": "bsd-2-clause",
"hash": -6046389166866133000,
"line_mean": 27.5347593583,
"line_max": 139,
"alpha_frac": 0.5708395802,
"autogenerated": false,
"ratio": 2.7058823529411766,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.37767219331411767,
"avg_score": null,
"num_lines": null
} |
__author__ = 'admin'
class ContactHelper:
    """Page-object helpers for creating, editing and deleting contacts."""

    def __init__(self, app):
        self.app = app

    def create(self, contact):
        """Create a new contact from a Contact model object."""
        self.fill_contact_fields(contact)

    def modify(self, contact):
        # modify contact
        self.click_edit_button()
        self.fill_contact_fields(contact)

    def set_field_value(self, field_name, value):
        """Type ``value`` into the named input; no-op when value is None."""
        if value is not None:
            wd = self.app.wd
            wd.find_element_by_name(field_name).click()
            wd.find_element_by_name(field_name).clear()
            wd.find_element_by_name(field_name).send_keys(value)

    def select_list_item(self, list_id, value):
        """Select an option (both indices are 1-based strings) in the
        numbered <select> on the contact form; no-op when value is None."""
        if value is not None:
            wd = self.app.wd
            xpath = "//div[@id='content']/form/select[" + list_id + "]//option[" + value + "]"
            if not wd.find_element_by_xpath(xpath).is_selected():
                wd.find_element_by_xpath(xpath).click()

    def fill_contact_fields(self, contact):
        """Open the 'add new' form, fill it from ``contact`` and submit.

        Fix: the original set the ``homepage`` field twice in a row; the
        duplicate call is removed (same end state, one less round-trip to
        the browser).
        """
        wd = self.app.wd
        wd.find_element_by_link_text("add new").click()
        self.set_field_value("firstname", contact.firstname)
        self.set_field_value("middlename", contact.middlename)
        self.set_field_value("lastname", contact.lastname)
        self.set_field_value("nickname", contact.nickname)
        self.set_field_value("title", contact.title)
        self.set_field_value("company", contact.company)
        self.set_field_value("address", contact.address)
        self.set_field_value("home", contact.phone_home)
        self.set_field_value("mobile", contact.phone_mobile)
        self.set_field_value("work", contact.phone_work)
        self.set_field_value("fax", contact.fax)
        self.set_field_value("email", contact.email_first)
        self.set_field_value("email2", contact.email_second)
        self.set_field_value("email3", contact.email_third)
        self.set_field_value("homepage", contact.homepage)
        self.select_list_item("1", contact.birth_day_list_item)
        self.select_list_item("2", contact.birth_month_list_item)
        self.set_field_value("byear", contact.birth_year)
        self.select_list_item("3", contact.anniversary_day_list_item)
        self.select_list_item("4", contact.anniversary_month_list_item)
        self.set_field_value("ayear", contact.anniversary_year)
        self.set_field_value("address2", contact.second_address)
        self.set_field_value("phone2", contact.second_phone)
        self.set_field_value("notes", contact.notes)
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()

    def delete_first_contact(self):
        """Tick the first contact's checkbox and delete it."""
        wd = self.app.wd
        wd.find_element_by_name("selected[]").click()
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()

    def delete_all_contacts(self):
        """Tick the select-all checkbox and delete every contact."""
        wd = self.app.wd
        mass_checkbox = wd.find_element_by_id("MassCB")
        if not mass_checkbox.is_selected():
            mass_checkbox.click()
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()

    def click_edit_button(self):
        """Open the first contact's edit page via its pencil icon."""
        wd = self.app.wd
        wd.find_element_by_css_selector("img[alt=\"Edit\"]").click()

    def count(self):
        """Return the number of contacts listed on the page."""
        wd = self.app.wd
        return len(wd.find_elements_by_name("selected[]"))
| {
"repo_name": "dimchenkoAlexey/python_training",
"path": "fixture/contact.py",
"copies": "1",
"size": "3347",
"license": "apache-2.0",
"hash": 5989230038582148000,
"line_mean": 38.8452380952,
"line_max": 94,
"alpha_frac": 0.6187630714,
"autogenerated": false,
"ratio": 3.4187946884576097,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.453755775985761,
"avg_score": null,
"num_lines": null
} |
__author__ = 'admin'
class SessionHelper:
    """Login/logout helpers for the address-book application."""

    def __init__(self, app):
        self.app = app

    def _fill(self, wd, field, text):
        # Click, clear and type into a named input field.
        wd.find_element_by_name(field).click()
        wd.find_element_by_name(field).clear()
        wd.find_element_by_name(field).send_keys(text)

    def login(self, username, password):
        """Open the home page and submit the login form."""
        wd = self.app.wd
        self.app.navigation.open_home_page()
        # login
        self._fill(wd, "user", username)
        self._fill(wd, "pass", password)
        wd.find_element_by_css_selector("input[type=\"submit\"]").click()

    def safe_login(self, username, password):
        """Log in as ``username``, reusing an already-active session when
        it belongs to the same user."""
        if self.is_logged_in():
            if self.is_logged_in_as_user(username):
                return
            self.logout()
        self.login(username, password)

    def logout(self):
        # logout
        self.app.wd.find_element_by_link_text("Logout").click()

    def safe_logout(self):
        """Log out only when a session is active."""
        if self.is_logged_in():
            self.logout()

    def is_logged_in(self):
        """True when a Logout link is present on the page."""
        return len(self.app.wd.find_elements_by_link_text("Logout")) > 0

    def is_logged_in_as_user(self, username):
        """True when the page banner shows ``(username)``."""
        banner = self.app.wd.find_element_by_xpath("//div/div[1]/form/b")
        return banner.text == "(" + username + ")"
| {
"repo_name": "dimchenkoAlexey/python_training",
"path": "fixture/session.py",
"copies": "1",
"size": "1420",
"license": "apache-2.0",
"hash": 5658171582653878000,
"line_mean": 29.8695652174,
"line_max": 89,
"alpha_frac": 0.5535211268,
"autogenerated": false,
"ratio": 3.429951690821256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4483472817621256,
"avg_score": null,
"num_lines": null
} |
__author__ = 'admin'
import json
import os
from glob import glob
import logging
import time
import webmocker.mock_helper.pretender_defaults
import webmocker.mock_helper.pretend_helpers
from webmocker.mock_helper import HttpMock, pretender_defaults,MockRequest,MockResponse, pretend_helpers
LOGGER = logging.getLogger("webmocker")
def start(port_number = pretender_defaults.portno,stub_name = pretender_defaults.stub_name):
    """Restart the pretend server on ``port_number`` and load every stub file."""
    pretender_defaults.portno = port_number
    pretender_defaults.stub_name = stub_name
    # Stop any running server, then bring up a fresh one.
    restart_pretend(port_number)
    path = get_stub_files_path()
    time.sleep(3)  # give the server a moment to come up before stubbing
    pretend_helpers.process_stub_files(path)
def stop():
    """Shut the pretend server down."""
    pretend_helpers.stop_pretend()
def restart_pretend(port_number):
    """Stop the pretend server (if running) and start a new one."""
    pretend_helpers.stop_pretend()
    pretend_helpers.start_pretend(port_number)
def get_stub_files_path():
    """Return the stub directory: the ``stub_files_path`` environment
    variable when set, else the package default.

    Fix: the Python-2-only ``dict.has_key`` call is replaced by a plain
    membership test, which works on both Python 2 and 3.
    """
    if "stub_files_path" in os.environ:
        return os.environ["stub_files_path"]
    return pretender_defaults.stub_files_path
| {
"repo_name": "sheltonpaul89/web-mocker",
"path": "webmocker/stubbing_engine.py",
"copies": "1",
"size": "1057",
"license": "bsd-2-clause",
"hash": 8382540957883044000,
"line_mean": 32.03125,
"line_max": 129,
"alpha_frac": 0.7492904447,
"autogenerated": false,
"ratio": 3.3662420382165603,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.461553248291656,
"avg_score": null,
"num_lines": null
} |
__author__ = 'admin'
import ply.lex as lex
# Python 2 reserved words (includes 'print' and 'exec', which are keywords
# only in Python 2).
PYTHON_KEYWORDS = [
    'and'
    , 'as'
    , 'assert'
    , 'break'
    , 'class'
    , 'continue'
    , 'def'
    , 'del'
    , 'elif'
    , 'else'
    , 'except'
    , 'exec'
    , 'finally'
    , 'for'
    , 'from'
    , 'global'
    , 'if'
    , 'import'
    , 'in'
    , 'is'
    , 'lambda'
    , 'not'
    , 'or'
    , 'pass'
    , 'print'
    , 'raise'
    , 'return'
    , 'try'
    , 'while'
    , 'with'
    , 'yield'
]
class PythonTokenizer:
    """ply-based tokenizer for Python source.

    CAUTION: ply.lex uses each ``t_*`` method's ``__doc__`` as the token's
    regex, so the token methods below must NOT carry docstrings — their
    patterns are assigned to ``__doc__`` in ``__init__`` instead.
    """
    MY_KEYWORDS = PYTHON_KEYWORDS
    # Keyword -> token-type map; keywords double as their own token names.
    RESERVED = {kw: kw for kw in MY_KEYWORDS}
    # NOTE(review): Python 2 only — dict.values() + list fails on Python 3
    # (dict_values does not support '+').
    tokens = RESERVED.values() + [
        'ID'
        , 'STRING_LITERAL'
        , 'NUMBER'
        , 'COMMENT'
        , 'LINE_COMMENT'
    ]
    # Decorator: while inside a nested comment, force the token type to
    # COMMENT instead of running the real rule.  Copies __doc__ so ply
    # still sees the original regex.
    def check_comment(fn):
        def wrapped(self, t):
            if self.nested_comment:
                t.type = 'COMMENT'
                return t
            else:
                return fn(self, t)
        wrapped.__doc__ = fn.__doc__
        return wrapped
    @check_comment
    def t_ID(self, t):
        # Reclassify identifiers that are actually keywords.
        t.type = self.__class__.RESERVED.get(t.value, 'ID')
        return t
    @check_comment
    def t_STRING_LITERAL(self, t):
        return t
    @check_comment
    def t_NUMBER(self, t):
        return t
    def t_LINE_COMMENT(self, t):
        t.type = 'COMMENT'
        return t
    t_ignore = ' \t'
    def t_error(self, t):
        # self.skipped.append(t.value)
        t.lexer.skip(1)
    def __init__(self, **kwargs):
        # Token regexes are injected as docstrings here, before lex.lex()
        # inspects this instance.
        self.t_ID.__func__.__doc__ = r'[a-zA-z_][a-zA-Z0-9_]*'
        self.t_STRING_LITERAL.__func__.__doc__ = r'\'.*\''
        self.t_NUMBER.__func__.__doc__ = r'\d+'
        self.t_LINE_COMMENT.__func__.__doc__ = r'\#.*'
        self.skipped = []
        self.nested_comment = 0
        self.lexer = lex.lex(module=self, **kwargs)
    def refresh(self):
        # Reset per-run state between tokenize() calls.
        self.skipped = []
        self.nested_comment = 0
    def tokenize(self, data):
        # Run the lexer over ``data``, grouping tokens by type.
        self.lexer.input(data)
        self.out_token_dict = {}
        while True:
            tok = self.lexer.token()
            if not tok: break
            self.out_token_dict[tok.type] = self.out_token_dict.get(tok.type, [])
            self.out_token_dict[tok.type].append(tok)
        return self.out_token_dict
    def keywords_ex_stats(self, extra_type_list=[]):
        # Per-keyword token lists from the last tokenize() run (empty list
        # when a keyword never occurred).
        keys = PythonTokenizer.MY_KEYWORDS + extra_type_list
        return {k: self.out_token_dict.get(k, []) for k in keys}
"repo_name": "sayon/ignoreme",
"path": "tokenizers/pythonlex/pythonsmartlex.py",
"copies": "1",
"size": "2427",
"license": "mit",
"hash": -6976706903152421000,
"line_mean": 20.6785714286,
"line_max": 81,
"alpha_frac": 0.4857849197,
"autogenerated": false,
"ratio": 3.2446524064171123,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9114858660157414,
"avg_score": 0.02311573319193964,
"num_lines": 112
} |
__author__ = 'admin'
import urllib2, base64
from models import AccountStatement
# Maps a raw statement code to its charge category.
# NOTE(review): StatementProcessor below hard-codes these same code sets in
# its if/elif chains instead of consulting this table — keep the two in sync.
CHARGES_DICTIONARY = {
    # Tuition ("Colegiatura")
    '3010': 'Tuition',
    '5510': 'Tuition',
    '5520': 'Tuition',
    '5550': 'Tuition',
    '8010': 'Tuition',
    '9010': 'Tuition',
    '9020': 'Tuition',
    '9050': 'Tuition',
    '9101': 'Tuition',
    '9107': 'Tuition',
    '9180': 'Tuition',
    '9190': 'Tuition',
    '9801': 'Tuition',
    '9802': 'Tuition',
    # Interest, commissions and penalties ("Intereses Comisiones y penalidades")
    '40': 'icp',
    '681': 'icp',
    '682': 'icp',
    '683': 'icp',
    '9103': 'icp',
    '9109': 'icp',
    '9110': 'icp',
    '9120': 'icp',
    '9150': 'icp',
    '9210': 'icp',
    '9220': 'icp',
    '9250': 'icp',
    '9410': 'icp',
    '9420': 'icp',
    '9450': 'icp',
    # Credit balance ("Saldo a favor")
    '50': 'saldo',
    '500': 'saldo',
    # Insurance ("Seguros")
    '9000': 'seguros',
    '9111': 'seguros',
    # Miscellaneous services ("Servicios diversos")
    '661': 'servicios',
    '1607': 'servicios',
    '1613': 'servicios'
}
class StatementProcessor:
    """Groups raw statement records into per-student account totals."""

    @staticmethod
    def search_for_statements_by_student(list, student_id):
        """Return the statements in `list` belonging to `student_id`.

        NOTE: the parameter name `list` shadows the builtin; it is kept
        to preserve the public keyword-argument interface.
        """
        return [entry for entry in list
                if entry.statement_student == student_id]
@staticmethod
def process_transactions(statements, student):
tmp_services = 0.0
tmp_insurance = 0.0
tmp_credit = 0.0
tmp_interests = 0.0
tmp_tuition = 0.0
for statement in statements:
# Checking for tuition
if statement.statement_code == '3010' \
or statement.statement_code == '5510' \
or statement.statement_code == '5520' \
or statement.statement_code == '5550' \
or statement.statement_code == '8010' \
or statement.statement_code == '9010' \
or statement.statement_code == '9020' \
or statement.statement_code == '9050' \
or statement.statement_code == '9101' \
or statement.statement_code == '9107' \
or statement.statement_code == '9180' \
or statement.statement_code == '9190' \
or statement.statement_code == '9801' \
or statement.statement_code == '9802':
tmp_tuition += float(statement.statement_value)
# Checking for ICP's
elif statement.statement_code == '3010' \
or statement.statement_code == '40' \
or statement.statement_code == '681' \
or statement.statement_code == '682' \
or statement.statement_code == '683' \
or statement.statement_code == '9103' \
or statement.statement_code == '9109' \
or statement.statement_code == '9110' \
or statement.statement_code == '9120' \
or statement.statement_code == '9150' \
or statement.statement_code == '9210' \
or statement.statement_code == '9220' \
or statement.statement_code == '9250' \
or statement.statement_code == '9410' \
or statement.statement_code == '9420' \
or statement.statement_code == '9450':
tmp_interests += float(statement.statement_value)
# Checking for credit
elif statement.statement_code == '50' or statement.statement_code == '500':
tmp_credit += float(statement.statement_value)
# Checking for Insurance
elif statement.statement_code == '9000' or statement.statement_code == '9111':
tmp_insurance += float(statement.statement_value)
# Checking for services
elif statement.statement_code == '661' \
or statement.statement_code == '1607' \
or statement.statement_code == '1613':
tmp_services += float(statement.statement_value)
account_statement = AccountStatement(student,
statement.statement_date,
abs(tmp_tuition),
abs(tmp_interests),
abs(tmp_credit),
abs(tmp_insurance),
abs(tmp_services))
#print account_statement.to_json()
account_statement.upload() | {
"repo_name": "EnriqueRE/Estado-de-Cuenta",
"path": "Transaction Uploader/StatementProcessor.py",
"copies": "1",
"size": "4586",
"license": "apache-2.0",
"hash": -1618125944888663800,
"line_mean": 36.9090909091,
"line_max": 90,
"alpha_frac": 0.4925861317,
"autogenerated": false,
"ratio": 3.923011120615911,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.991002320564217,
"avg_score": 0.001114809334748216,
"num_lines": 121
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.