hexsha
stringlengths
40
40
size
int64
2
1.02M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
245
max_stars_repo_name
stringlengths
6
130
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
245
max_issues_repo_name
stringlengths
6
130
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
245
max_forks_repo_name
stringlengths
6
130
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
1.02M
avg_line_length
float64
1
417k
max_line_length
int64
1
987k
alphanum_fraction
float64
0
1
content_no_comment
stringlengths
0
1.01M
is_comment_constant_removed
bool
1 class
is_sharp_comment_removed
bool
1 class
f703d2a9f3424c2ad908f8b302031b072df6c8fa
426
py
Python
rfapi/test/test_errors.py
cestrada-rf/rfapi-python
ca813d652fbfd02e90df2d4b7d280a981bd8d5c3
[ "Apache-2.0" ]
32
2016-12-09T13:31:31.000Z
2022-02-12T00:59:44.000Z
rfapi/test/test_errors.py
cestrada-rf/rfapi-python
ca813d652fbfd02e90df2d4b7d280a981bd8d5c3
[ "Apache-2.0" ]
5
2019-03-21T10:01:19.000Z
2020-02-13T14:06:54.000Z
rfapi/test/test_errors.py
cestrada-rf/rfapi-python
ca813d652fbfd02e90df2d4b7d280a981bd8d5c3
[ "Apache-2.0" ]
10
2018-06-25T15:49:28.000Z
2021-12-03T12:39:44.000Z
import unittest from rfapi.error import JsonParseError, MissingAuthError class ApiClientTest(unittest.TestCase): def test_json_parse_error(self): resp = type('', (object,), {"content": ""})() msg = "Could not parse" e = JsonParseError(msg, resp) self.assertEqual(str(e), msg) def test_missing_auth_error(self): e = MissingAuthError() self.assertTrue("API" in str(e))
28.4
56
0.650235
import unittest from rfapi.error import JsonParseError, MissingAuthError class ApiClientTest(unittest.TestCase): def test_json_parse_error(self): resp = type('', (object,), {"content": ""})() msg = "Could not parse" e = JsonParseError(msg, resp) self.assertEqual(str(e), msg) def test_missing_auth_error(self): e = MissingAuthError() self.assertTrue("API" in str(e))
true
true
f703d2c19fb0503f4ba11caa93e47284e1be03ce
899
py
Python
src/awkward/_v2/operations/describe/is_valid.py
scikit-hep/awkward-1.0
a440328f8097d22c2ba053fd117fed543829afc0
[ "BSD-3-Clause" ]
519
2019-10-17T12:36:22.000Z
2022-03-26T23:28:19.000Z
src/awkward/_v2/operations/describe/is_valid.py
scikit-hep/awkward-1.0
a440328f8097d22c2ba053fd117fed543829afc0
[ "BSD-3-Clause" ]
924
2019-11-03T21:05:01.000Z
2022-03-31T22:44:30.000Z
src/awkward/_v2/operations/describe/is_valid.py
scikit-hep/awkward-1.0
a440328f8097d22c2ba053fd117fed543829afc0
[ "BSD-3-Clause" ]
56
2019-12-17T15:49:22.000Z
2022-03-09T20:34:06.000Z
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE from __future__ import absolute_import import awkward as ak np = ak.nplike.NumpyMetadata.instance() def is_valid(array, exception=False): pass # """ # Args: # array (#ak.Array, #ak.Record, #ak.layout.Content, #ak.layout.Record, #ak.ArrayBuilder, #ak.layout.ArrayBuilder): # Array or record to check. # exception (bool): If True, validity errors raise exceptions. # Returns True if there are no errors and False if there is an error. # Checks for errors in the structure of the array, such as indexes that run # beyond the length of a node's `content`, etc. Either an error is raised or # the function returns a boolean. # See also #ak.validity_error. # """ # out = validity_error(array, exception=exception) # return out is None
29.966667
122
0.679644
from __future__ import absolute_import import awkward as ak np = ak.nplike.NumpyMetadata.instance() def is_valid(array, exception=False): pass # Args: # array (#ak.Array, #ak.Record, #ak.layout.Content, #ak.layout.Record, #ak.ArrayBuilder, #ak.layout.ArrayBuilder): # Array or record to check. # exception (bool): If True, validity errors raise exceptions. # Returns True if there are no errors and False if there is an error. # Checks for errors in the structure of the array, such as indexes that run # beyond the length of a node's `content`, etc. Either an error is raised or # the function returns a boolean. # See also #ak.validity_error. # """ # out = validity_error(array, exception=exception) # return out is None
true
true
f703d2e046e620c84768df767fae33740a8a8fed
1,338
py
Python
project/correctiondemos/pythag_test.py
gorff/Toric-Code-Correlated-Error-Decoder
c43cf34c22f03334add078f5d02e6604e5c89cba
[ "MIT" ]
1
2017-11-21T02:55:30.000Z
2017-11-21T02:55:30.000Z
project/correctiondemos/pythag_test.py
gorff/Toric-Code-Correlated-Error-Decoder
c43cf34c22f03334add078f5d02e6604e5c89cba
[ "MIT" ]
null
null
null
project/correctiondemos/pythag_test.py
gorff/Toric-Code-Correlated-Error-Decoder
c43cf34c22f03334add078f5d02e6604e5c89cba
[ "MIT" ]
null
null
null
import matplotlib.pyplot as plt import matplotlib.mlab as mlab import numpy as np import os,sys,inspect import imageio sys.path.insert(1, os.path.join(sys.path[0], '..')) #go up a dir to import import CodePy2.funmath as funmath #import imageio n = 1.0 sizes = [i/n for i in range(33*int(n))] xvals = sizes filenames = [] for expectedlength in sizes: yvals = [] fig = plt.figure() for i in sizes: variance = 1 strength = 1 yvals.append(funmath.getnormval(i,expectedlength,strength,variance)) maxval = mlab.normpdf(expectedlength, expectedlength, np.sqrt(variance)) #yvals[-1] = yvals[-1]*strength/maxval plt.plot(xvals,yvals) plt.grid(True) plt.ylabel('Adjusted weight (A)') plt.xlabel('Manhatten distance (M)') plt.axis([0, 30, 0, 30]) plt.title('Gaussian adjusted matching distances') plt.suptitle('variance = '+str(variance)+', w = '+str(expectedlength)) filename = 'gaussian/'+'gaussian-'+str(int(expectedlength*n))+'.png' plt.savefig(filename) filenames.append(filename) plt.close() #plt.show() #os.system("avconv -y -f image2 -i figs/gaussian-%d.png -r 10 -s 800x600 gaussianvideo.avi") #turn into gif images = [] for filename in filenames: images.append(imageio.imread(filename)) imageio.mimsave('xbar_demo.gif', images)
29.086957
92
0.675635
import matplotlib.pyplot as plt import matplotlib.mlab as mlab import numpy as np import os,sys,inspect import imageio sys.path.insert(1, os.path.join(sys.path[0], '..')) import CodePy2.funmath as funmath n = 1.0 sizes = [i/n for i in range(33*int(n))] xvals = sizes filenames = [] for expectedlength in sizes: yvals = [] fig = plt.figure() for i in sizes: variance = 1 strength = 1 yvals.append(funmath.getnormval(i,expectedlength,strength,variance)) maxval = mlab.normpdf(expectedlength, expectedlength, np.sqrt(variance)) plt.plot(xvals,yvals) plt.grid(True) plt.ylabel('Adjusted weight (A)') plt.xlabel('Manhatten distance (M)') plt.axis([0, 30, 0, 30]) plt.title('Gaussian adjusted matching distances') plt.suptitle('variance = '+str(variance)+', w = '+str(expectedlength)) filename = 'gaussian/'+'gaussian-'+str(int(expectedlength*n))+'.png' plt.savefig(filename) filenames.append(filename) plt.close() images = [] for filename in filenames: images.append(imageio.imread(filename)) imageio.mimsave('xbar_demo.gif', images)
true
true
f703d2f1fc5bfee57fcd5d4184762ae275517939
11,005
py
Python
models/Transformer.py
SunYanCN/nlp-experiments-in-pytorch
5d05a53146dffd707e4d037230656f980d7be05c
[ "MIT" ]
35
2018-11-19T14:08:16.000Z
2021-12-23T22:51:43.000Z
models/Transformer.py
SunYanCN/nlp-experiments-in-pytorch
5d05a53146dffd707e4d037230656f980d7be05c
[ "MIT" ]
null
null
null
models/Transformer.py
SunYanCN/nlp-experiments-in-pytorch
5d05a53146dffd707e4d037230656f980d7be05c
[ "MIT" ]
11
2019-01-12T06:48:39.000Z
2021-05-09T16:59:58.000Z
import copy import math import matplotlib.pyplot as plt import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from torch.autograd import Variable from utils.utils import clones class LayerNormGoogle(nn.Module): def __init__(self, features, epsilon=1e-6): super(LayerNormGoogle, self).__init__() self.a_2 = nn.Parameter(torch.ones(features)) self.b_2 = nn.Parameter(torch.zeros(features)) self.epsilon = epsilon def forward(self, x): mean = x.mean(-1, keepdim=True) std = x.std(-1, keepdim=True) return self.a_2 * (x - mean) / (std + self.epsilon) + self.b_2 class EncoderBlockGoogle(nn.Module): def __init__(self, layer, num_layers): super(EncoderBlockGoogle, self).__init__() self.layers = clones(layer, num_layers) self.norm = LayerNormGoogle(layer.size) def forward(self, x, mask): for layer in self.layers: x = layer(x, mask) return self.norm(x) class ResidualConnectionGoogle(nn.Module): def __init__(self, size, keep_prob): super(ResidualConnectionGoogle, self).__init__() self.norm = LayerNormGoogle(size) # TODO: Use dropout interface self.dropout = nn.Dropout(keep_prob) def forward(self, input, sublayer): return input + self.dropout(sublayer(self.norm(input))) class EncoderLayerGoogle(nn.Module): def __init__(self, size, attention, feed_forward, keep_prob): super(EncoderLayerGoogle, self).__init__() self.size = size self.attention = attention self.feed_forward = feed_forward # Each encoder layer has two sublayers self.sublayer = clones(ResidualConnectionGoogle(size, keep_prob), 2) def forward(self, x, mask): x = self.sublayer[0](x, lambda x: self.attention(x, x, x, mask)) return self.sublayer[1](x, self.feed_forward) class EncoderClassifier(nn.Module): def __init__(self, embedding, encoder, classifier, device, is_average=True): super(EncoderClassifier, self).__init__() self.embedding = embedding self.encoder = encoder self.classifier = classifier self.device = device self.is_average = is_average def forward(self, x, mask=None): kl_loss = 
torch.Tensor([0.0]) # Initial x.size() = [length, batch_size] x = x.permute(1, 0) # After permute x.size = [batch_size, length] x = self.embedding(x) if "cuda" in str(self.device): x = x.cuda() kl_loss = kl_loss.cuda() x = self.encoder(x, mask) if self.is_average: # Averaged sentence representation x = torch.mean(x, dim=1) x = self.classifier(x) return x, kl_loss class Classifier(nn.Module): def __init__(self, d_model, d_hidden, num_classes, keep_prob): super(Classifier, self).__init__() self.linear1 = nn.Linear(d_model, d_hidden) self.dropout = nn.Dropout(keep_prob) self.relu = nn.ReLU() self.linear2 = nn.Linear(d_hidden, num_classes) def forward(self, x): x = self.dropout(self.relu(self.linear1(x))) x = self.linear2(x) return x class MultiHeadedAttentionGoogle(nn.Module): def __init__(self, heads=8, d_model=512, keep_prob=0.1): super(MultiHeadedAttentionGoogle, self).__init__() assert d_model % heads == 0 self.d_k = d_model // heads self.heads = heads self.linears = clones(nn.Linear(d_model, d_model), 4) self.attn = None self.dropout = nn.Dropout(keep_prob) def attention(self, query, key, value, mask=None): # Dot product attention d_k = query.size(-1) scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k) if mask is not None: scores = scores.masked_fill(mask == 0, -1e9) p_attn = F.softmax(scores, dim=-1) if self.dropout is not None: p_attn = self.dropout(p_attn) return torch.matmul(p_attn, value), p_attn def forward(self, query, key, value, mask=None): num_batches = query.size(0) if mask is not None: mask = mask.unsqueeze(1) # Apply linear projection on the input sequence and split the heads. 
query, key, value = [linear(x).view(num_batches, -1, self.heads, self.d_k).transpose(1, 2) for linear, x in zip(self.linears, (query, key, value))] # Apply attention on the projected and splitted vectors x, self.attn = self.attention(query, key, value, mask=mask) # Concat vectors and apply linear x = x.transpose(1, 2).contiguous().view(num_batches, -1, self.heads * self.d_k) return self.linears[-1](x) class PositionalFeedForwardGoogle(nn.Module): def __init__(self, d_model, d_ff, keep_prob=0.1): super(PositionalFeedForwardGoogle, self).__init__() self.w_1 = nn.Linear(d_model, d_ff) self.w_2 = nn.Linear(d_ff, d_model) self.dropout = nn.Dropout(keep_prob) self.relu = nn.ReLU() def forward(self, input): return self.w_2(self.dropout(self.relu(self.w_1(input)))) class Embeddings(nn.Module): def __init__(self, embed_dim, vocab_size, padding_id, use_pretrained_embed, pretrained_weights, optional_sqrt_mul=False): super(Embeddings, self).__init__() # Initialize embeddings self.embedding = nn.Embedding(vocab_size, embed_dim, padding_idx=padding_id).cpu() if use_pretrained_embed: self.embedding.from_pretrained(pretrained_weights) self.embed_dim = embed_dim self.optional_sqrt_mul = optional_sqrt_mul def forward(self, input): if self.optional_sqrt_mul: return self.embedding(input) * math.sqrt(self.embed_dim) else: return self.embedding(input) class PositionalEncodingGoogle(nn.Module): def __init__(self, d_model, keep_prob=0.1, max_len=5000): super(PositionalEncodingGoogle, self).__init__() self.dropout = nn.Dropout(keep_prob) positional_encoding = torch.zeros(max_len, d_model) pos = torch.arange(0., max_len).unsqueeze(1) # Log space div_term = torch.exp(torch.arange(0., d_model, 2) * (-math.log(10000) / d_model)) positional_encoding[:, 0::2] = torch.sin(pos * div_term) positional_encoding[:, 1::2] = torch.cos(pos * div_term) positional_encoding = positional_encoding.unsqueeze(0) self.register_buffer("pe", positional_encoding) def forward(self, input): return self.dropout(input 
+ Variable(self.pe[:, :input.size(1)], requires_grad=False)) class TransformerGoogle: def __init__(self, args): super(TransformerGoogle, self).__init__() self.args_common = args["common_model_properties"] self.args_specific = args["transformer_google"] # Device self.device = self.args_common["device"] # Input/Output dimensions self.vocab_size = self.args_common["vocab_size"] self.embed_dim = self.args_common["embed_dim"] self.num_class = self.args_common["num_class"] # Embedding parameters self.padding_id = self.args_common["padding_id"] # Condition parameters self.use_pretrained_embed = self.args_common["use_pretrained_embed"] self.use_embed_sqrt_mul = self.args_specific["use_embed_sqrt_mul"] # Pretrained embedding weights self.pretrained_weights = self.args_common["pretrained_weights"] # Dropout probabilities for each individual part of the full model. self.keep_prob_encoder = self.args_specific["keep_prob_encoder"] self.keep_prob_pe = self.args_specific["keep_prob_pe"] self.kee_prob_pff = self.args_specific["keep_prob_pff"] self.keep_prob_attn = self.args_specific["keep_prob_attn"] self.keep_prob_clf = self.args_specific["keep_prob_clf"] # Condition parameter for the transformer type (It only supports classification for now) self.transformer_type = self.args_specific["transformer_type"] # Number of parallel attention layers for MultiHeadedAttention self.heads = self.args_specific["heads"] # Number of encoder layers self.num_encoder_layers = self.args_specific["num_encoder_layers"] # Number of hidden count units for Position-Wise Feed-Forward Network self.num_hidden_pos_ff = self.args_specific["num_hidden_pos_ff"] # Maximum length of an input self.max_length = self.args_specific["max_length"] if self.transformer_type == "classifier": self.model = self.create_classifier_transformer() else: raise ValueError("Transformer can be created as classifier for now!") def create_classifier_transformer(self): c = copy.deepcopy # Initialize individual parts of the full model 
# attention = torch.nn.MultiheadAttention(num_heads=self.heads, embed_dim=self.embed_dim, # dropout=self.keep_prob_attn) attention = MultiHeadedAttentionGoogle(heads=self.heads, d_model=self.embed_dim, keep_prob=self.keep_prob_attn) ff = PositionalFeedForwardGoogle(d_model=self.embed_dim, d_ff=self.num_hidden_pos_ff, keep_prob=self.kee_prob_pff) embeddings = Embeddings(self.embed_dim, self.vocab_size, self.padding_id, self.use_pretrained_embed, self.pretrained_weights, optional_sqrt_mul=self.use_embed_sqrt_mul) positional_embeddings = PositionalEncodingGoogle(d_model=self.embed_dim, keep_prob=self.keep_prob_pe, max_len=self.max_length) # Initialize the full model model = EncoderClassifier(nn.Sequential(embeddings, c(positional_embeddings)), EncoderBlockGoogle( EncoderLayerGoogle(self.embed_dim, c(attention), c(ff), self.keep_prob_encoder), self.num_encoder_layers), Classifier(self.embed_dim, d_hidden=self.embed_dim // 2, num_classes=self.num_class, keep_prob=self.keep_prob_clf), device=self.device) # Initialize model parameters for p in model.parameters(): if p.dim() > 1: nn.init.xavier_uniform_(p) return model if __name__ == '__main__': print("Transformer tests") plt.figure(figsize=(15, 5)) pe = PositionalEncodingGoogle(20, 0) y = pe.forward(Variable(torch.zeros(1, 100, 20))) plt.plot(np.arange(100), y[0, :, 4:8].data.numpy()) plt.legend(["dim %d" % p for p in [4, 5, 6, 7]]) plt.show()
37.948276
119
0.6408
import copy import math import matplotlib.pyplot as plt import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from torch.autograd import Variable from utils.utils import clones class LayerNormGoogle(nn.Module): def __init__(self, features, epsilon=1e-6): super(LayerNormGoogle, self).__init__() self.a_2 = nn.Parameter(torch.ones(features)) self.b_2 = nn.Parameter(torch.zeros(features)) self.epsilon = epsilon def forward(self, x): mean = x.mean(-1, keepdim=True) std = x.std(-1, keepdim=True) return self.a_2 * (x - mean) / (std + self.epsilon) + self.b_2 class EncoderBlockGoogle(nn.Module): def __init__(self, layer, num_layers): super(EncoderBlockGoogle, self).__init__() self.layers = clones(layer, num_layers) self.norm = LayerNormGoogle(layer.size) def forward(self, x, mask): for layer in self.layers: x = layer(x, mask) return self.norm(x) class ResidualConnectionGoogle(nn.Module): def __init__(self, size, keep_prob): super(ResidualConnectionGoogle, self).__init__() self.norm = LayerNormGoogle(size) self.dropout = nn.Dropout(keep_prob) def forward(self, input, sublayer): return input + self.dropout(sublayer(self.norm(input))) class EncoderLayerGoogle(nn.Module): def __init__(self, size, attention, feed_forward, keep_prob): super(EncoderLayerGoogle, self).__init__() self.size = size self.attention = attention self.feed_forward = feed_forward self.sublayer = clones(ResidualConnectionGoogle(size, keep_prob), 2) def forward(self, x, mask): x = self.sublayer[0](x, lambda x: self.attention(x, x, x, mask)) return self.sublayer[1](x, self.feed_forward) class EncoderClassifier(nn.Module): def __init__(self, embedding, encoder, classifier, device, is_average=True): super(EncoderClassifier, self).__init__() self.embedding = embedding self.encoder = encoder self.classifier = classifier self.device = device self.is_average = is_average def forward(self, x, mask=None): kl_loss = torch.Tensor([0.0]) x = x.permute(1, 0) x = self.embedding(x) if "cuda" 
in str(self.device): x = x.cuda() kl_loss = kl_loss.cuda() x = self.encoder(x, mask) if self.is_average: x = torch.mean(x, dim=1) x = self.classifier(x) return x, kl_loss class Classifier(nn.Module): def __init__(self, d_model, d_hidden, num_classes, keep_prob): super(Classifier, self).__init__() self.linear1 = nn.Linear(d_model, d_hidden) self.dropout = nn.Dropout(keep_prob) self.relu = nn.ReLU() self.linear2 = nn.Linear(d_hidden, num_classes) def forward(self, x): x = self.dropout(self.relu(self.linear1(x))) x = self.linear2(x) return x class MultiHeadedAttentionGoogle(nn.Module): def __init__(self, heads=8, d_model=512, keep_prob=0.1): super(MultiHeadedAttentionGoogle, self).__init__() assert d_model % heads == 0 self.d_k = d_model // heads self.heads = heads self.linears = clones(nn.Linear(d_model, d_model), 4) self.attn = None self.dropout = nn.Dropout(keep_prob) def attention(self, query, key, value, mask=None): d_k = query.size(-1) scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k) if mask is not None: scores = scores.masked_fill(mask == 0, -1e9) p_attn = F.softmax(scores, dim=-1) if self.dropout is not None: p_attn = self.dropout(p_attn) return torch.matmul(p_attn, value), p_attn def forward(self, query, key, value, mask=None): num_batches = query.size(0) if mask is not None: mask = mask.unsqueeze(1) query, key, value = [linear(x).view(num_batches, -1, self.heads, self.d_k).transpose(1, 2) for linear, x in zip(self.linears, (query, key, value))] x, self.attn = self.attention(query, key, value, mask=mask) x = x.transpose(1, 2).contiguous().view(num_batches, -1, self.heads * self.d_k) return self.linears[-1](x) class PositionalFeedForwardGoogle(nn.Module): def __init__(self, d_model, d_ff, keep_prob=0.1): super(PositionalFeedForwardGoogle, self).__init__() self.w_1 = nn.Linear(d_model, d_ff) self.w_2 = nn.Linear(d_ff, d_model) self.dropout = nn.Dropout(keep_prob) self.relu = nn.ReLU() def forward(self, input): return 
self.w_2(self.dropout(self.relu(self.w_1(input)))) class Embeddings(nn.Module): def __init__(self, embed_dim, vocab_size, padding_id, use_pretrained_embed, pretrained_weights, optional_sqrt_mul=False): super(Embeddings, self).__init__() self.embedding = nn.Embedding(vocab_size, embed_dim, padding_idx=padding_id).cpu() if use_pretrained_embed: self.embedding.from_pretrained(pretrained_weights) self.embed_dim = embed_dim self.optional_sqrt_mul = optional_sqrt_mul def forward(self, input): if self.optional_sqrt_mul: return self.embedding(input) * math.sqrt(self.embed_dim) else: return self.embedding(input) class PositionalEncodingGoogle(nn.Module): def __init__(self, d_model, keep_prob=0.1, max_len=5000): super(PositionalEncodingGoogle, self).__init__() self.dropout = nn.Dropout(keep_prob) positional_encoding = torch.zeros(max_len, d_model) pos = torch.arange(0., max_len).unsqueeze(1) div_term = torch.exp(torch.arange(0., d_model, 2) * (-math.log(10000) / d_model)) positional_encoding[:, 0::2] = torch.sin(pos * div_term) positional_encoding[:, 1::2] = torch.cos(pos * div_term) positional_encoding = positional_encoding.unsqueeze(0) self.register_buffer("pe", positional_encoding) def forward(self, input): return self.dropout(input + Variable(self.pe[:, :input.size(1)], requires_grad=False)) class TransformerGoogle: def __init__(self, args): super(TransformerGoogle, self).__init__() self.args_common = args["common_model_properties"] self.args_specific = args["transformer_google"] self.device = self.args_common["device"] self.vocab_size = self.args_common["vocab_size"] self.embed_dim = self.args_common["embed_dim"] self.num_class = self.args_common["num_class"] self.padding_id = self.args_common["padding_id"] self.use_pretrained_embed = self.args_common["use_pretrained_embed"] self.use_embed_sqrt_mul = self.args_specific["use_embed_sqrt_mul"] self.pretrained_weights = self.args_common["pretrained_weights"] self.keep_prob_encoder = self.args_specific["keep_prob_encoder"] 
self.keep_prob_pe = self.args_specific["keep_prob_pe"] self.kee_prob_pff = self.args_specific["keep_prob_pff"] self.keep_prob_attn = self.args_specific["keep_prob_attn"] self.keep_prob_clf = self.args_specific["keep_prob_clf"] self.transformer_type = self.args_specific["transformer_type"] self.heads = self.args_specific["heads"] self.num_encoder_layers = self.args_specific["num_encoder_layers"] self.num_hidden_pos_ff = self.args_specific["num_hidden_pos_ff"] self.max_length = self.args_specific["max_length"] if self.transformer_type == "classifier": self.model = self.create_classifier_transformer() else: raise ValueError("Transformer can be created as classifier for now!") def create_classifier_transformer(self): c = copy.deepcopy attention = MultiHeadedAttentionGoogle(heads=self.heads, d_model=self.embed_dim, keep_prob=self.keep_prob_attn) ff = PositionalFeedForwardGoogle(d_model=self.embed_dim, d_ff=self.num_hidden_pos_ff, keep_prob=self.kee_prob_pff) embeddings = Embeddings(self.embed_dim, self.vocab_size, self.padding_id, self.use_pretrained_embed, self.pretrained_weights, optional_sqrt_mul=self.use_embed_sqrt_mul) positional_embeddings = PositionalEncodingGoogle(d_model=self.embed_dim, keep_prob=self.keep_prob_pe, max_len=self.max_length) model = EncoderClassifier(nn.Sequential(embeddings, c(positional_embeddings)), EncoderBlockGoogle( EncoderLayerGoogle(self.embed_dim, c(attention), c(ff), self.keep_prob_encoder), self.num_encoder_layers), Classifier(self.embed_dim, d_hidden=self.embed_dim // 2, num_classes=self.num_class, keep_prob=self.keep_prob_clf), device=self.device) for p in model.parameters(): if p.dim() > 1: nn.init.xavier_uniform_(p) return model if __name__ == '__main__': print("Transformer tests") plt.figure(figsize=(15, 5)) pe = PositionalEncodingGoogle(20, 0) y = pe.forward(Variable(torch.zeros(1, 100, 20))) plt.plot(np.arange(100), y[0, :, 4:8].data.numpy()) plt.legend(["dim %d" % p for p in [4, 5, 6, 7]]) plt.show()
true
true
f703d2f45a05feea75027e7f0d41d7453e9b6fc3
3,283
py
Python
program/model/version/ver1_2.py
donyori/2018ccf_bdci_inter_fund_correlation_prediction
6e06a3e192e05ae1e9822111cf323eda3a61bf4e
[ "MIT" ]
null
null
null
program/model/version/ver1_2.py
donyori/2018ccf_bdci_inter_fund_correlation_prediction
6e06a3e192e05ae1e9822111cf323eda3a61bf4e
[ "MIT" ]
1
2018-12-18T05:14:08.000Z
2019-01-16T06:31:35.000Z
program/model/version/ver1_2.py
donyori/2018ccf_bdci_inter_fund_correlation_prediction
6e06a3e192e05ae1e9822111cf323eda3a61bf4e
[ "MIT" ]
null
null
null
from tensorflow import keras from constants import TRADING_DAYS_PER_WEEK, INDEX_RETURN_INDICATOR_NUMBER from ..constants import * MODEL_NAME = 'ifcp_model_ver1_2' ROLLING_WINDOW_SIZE = TRADING_DAYS_PER_WEEK def build_model(): fund1_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, 1), name=FUND1_RETURN_NAME) fund1_benchmark_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, 1), name=FUND1_BENCHMARK_RETURN_NAME) fund2_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, 1), name=FUND2_RETURN_NAME) fund2_benchmark_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, 1), name=FUND2_BENCHMARK_RETURN_NAME) fund1_performance = keras.layers.subtract([fund1_return, fund1_benchmark_return], name='fund1_performance') fund2_performance = keras.layers.subtract([fund2_return, fund2_benchmark_return], name='fund2_performance') fund1_attributes = keras.layers.concatenate( [fund1_return, fund1_benchmark_return, fund1_performance], name='fund1_attributes') fund2_attributes = keras.layers.concatenate( [fund2_return, fund2_benchmark_return, fund2_performance], name='fund2_attributes') fund_attributes_gru = keras.layers.GRU( 12, kernel_regularizer=keras.regularizers.l2(0.01), recurrent_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01), name='fund_attributes_gru', ) fund1_attributes_after_gru = fund_attributes_gru(fund1_attributes) fund2_attributes_after_gru = fund_attributes_gru(fund2_attributes) fund_attributes_after_gru = keras.layers.concatenate( [fund1_attributes_after_gru, fund2_attributes_after_gru], name='fund_attributes_after_gru') auxiliary_output = keras.layers.Dense(1, activation='sigmoid', name=AUXILIARY_OUTPUT_NAME)( fund_attributes_after_gru) index_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, INDEX_RETURN_INDICATOR_NUMBER), name=INDEX_RETURN_NAME) index_return_gru = keras.layers.GRU( 35, kernel_regularizer=keras.regularizers.l2(0.01), recurrent_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01), 
name='index_return_gru', ) index_return_after_gru = index_return_gru(index_return) merge = keras.layers.concatenate([fund_attributes_after_gru, index_return_after_gru], name='merge') x = keras.layers.Dense(64, activation='relu', kernel_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01))(merge) x = keras.layers.Dense(64, activation='relu', kernel_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01))(x) x = keras.layers.Dense(16, activation='relu', kernel_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01))(x) main_output = keras.layers.Dense(1, activation='sigmoid', name=MAIN_OUTPUT_NAME)(x) model = keras.Model(inputs=[ fund1_return, fund1_benchmark_return, fund2_return, fund2_benchmark_return, index_return], outputs=[main_output, auxiliary_output]) return model
49.742424
114
0.73378
from tensorflow import keras from constants import TRADING_DAYS_PER_WEEK, INDEX_RETURN_INDICATOR_NUMBER from ..constants import * MODEL_NAME = 'ifcp_model_ver1_2' ROLLING_WINDOW_SIZE = TRADING_DAYS_PER_WEEK def build_model(): fund1_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, 1), name=FUND1_RETURN_NAME) fund1_benchmark_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, 1), name=FUND1_BENCHMARK_RETURN_NAME) fund2_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, 1), name=FUND2_RETURN_NAME) fund2_benchmark_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, 1), name=FUND2_BENCHMARK_RETURN_NAME) fund1_performance = keras.layers.subtract([fund1_return, fund1_benchmark_return], name='fund1_performance') fund2_performance = keras.layers.subtract([fund2_return, fund2_benchmark_return], name='fund2_performance') fund1_attributes = keras.layers.concatenate( [fund1_return, fund1_benchmark_return, fund1_performance], name='fund1_attributes') fund2_attributes = keras.layers.concatenate( [fund2_return, fund2_benchmark_return, fund2_performance], name='fund2_attributes') fund_attributes_gru = keras.layers.GRU( 12, kernel_regularizer=keras.regularizers.l2(0.01), recurrent_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01), name='fund_attributes_gru', ) fund1_attributes_after_gru = fund_attributes_gru(fund1_attributes) fund2_attributes_after_gru = fund_attributes_gru(fund2_attributes) fund_attributes_after_gru = keras.layers.concatenate( [fund1_attributes_after_gru, fund2_attributes_after_gru], name='fund_attributes_after_gru') auxiliary_output = keras.layers.Dense(1, activation='sigmoid', name=AUXILIARY_OUTPUT_NAME)( fund_attributes_after_gru) index_return = keras.Input(shape=(ROLLING_WINDOW_SIZE, INDEX_RETURN_INDICATOR_NUMBER), name=INDEX_RETURN_NAME) index_return_gru = keras.layers.GRU( 35, kernel_regularizer=keras.regularizers.l2(0.01), recurrent_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01), 
name='index_return_gru', ) index_return_after_gru = index_return_gru(index_return) merge = keras.layers.concatenate([fund_attributes_after_gru, index_return_after_gru], name='merge') x = keras.layers.Dense(64, activation='relu', kernel_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01))(merge) x = keras.layers.Dense(64, activation='relu', kernel_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01))(x) x = keras.layers.Dense(16, activation='relu', kernel_regularizer=keras.regularizers.l2(0.01), activity_regularizer=keras.regularizers.l1(0.01))(x) main_output = keras.layers.Dense(1, activation='sigmoid', name=MAIN_OUTPUT_NAME)(x) model = keras.Model(inputs=[ fund1_return, fund1_benchmark_return, fund2_return, fund2_benchmark_return, index_return], outputs=[main_output, auxiliary_output]) return model
true
true
f703d302b98ab56a3febdd91cdf7285c913024e2
134
py
Python
fugue/extensions/creator/__init__.py
WangCHX/fugue
bb9758d23b0041d4ed00f6195f317d097a9dd683
[ "Apache-2.0" ]
null
null
null
fugue/extensions/creator/__init__.py
WangCHX/fugue
bb9758d23b0041d4ed00f6195f317d097a9dd683
[ "Apache-2.0" ]
null
null
null
fugue/extensions/creator/__init__.py
WangCHX/fugue
bb9758d23b0041d4ed00f6195f317d097a9dd683
[ "Apache-2.0" ]
null
null
null
# flake8: noqa from fugue.extensions.creator.creator import Creator from fugue.extensions.creator.convert import creator, _to_creator
33.5
65
0.843284
from fugue.extensions.creator.creator import Creator from fugue.extensions.creator.convert import creator, _to_creator
true
true
f703d38691891112aa274bc0ec16962638b37767
2,140
py
Python
app/utils/process.py
duzhangqiong/AutoLine
9c32261eb9a47610ee01787420c300b42eee26ac
[ "Apache-2.0" ]
249
2018-05-11T08:43:17.000Z
2021-08-13T09:32:57.000Z
app/utils/process.py
duzhangqiong/AutoLine
9c32261eb9a47610ee01787420c300b42eee26ac
[ "Apache-2.0" ]
31
2018-05-15T01:25:18.000Z
2022-03-12T00:11:29.000Z
app/utils/process.py
duzhangqiong/AutoLine
9c32261eb9a47610ee01787420c300b42eee26ac
[ "Apache-2.0" ]
132
2018-03-01T02:34:52.000Z
2021-02-08T02:00:40.000Z
# -*- coding: utf-8 -*- __author__ = "苦叶子" """ 公众号: 开源优测 Email: lymking@foxmail.com """ import os import time import tempfile import subprocess class Process: def __init__(self, command): self._command = command self._process = None self._error = None self._out_file = None self._out_path = None self._out_fd = None print(command) def start(self): self._out_fd, self._out_path = tempfile.mkstemp(prefix='rfproc_', suffix='.txt', text=True) self._out_file = open(self._out_path) try: self._process = subprocess.Popen(self._command, stdout=self._out_fd, stderr=subprocess.STDOUT) except OSError as err: self._error = str(err) def is_finished(self): return self._error is not None or self._process.poll() is not None def stop(self): self._process.kill() def wait(self): if self._process is not None: self._process.wait() def get_output(self, wait_until_finished=False): """Returns the output produced by the process. If ``wait_until_finished`` is True, blocks until the process is finished and returns all output. Otherwise the currently available output is returned immediately. Currently available output depends on buffering and might not include everything that has been written by the process. """ if self._error: self._close_outputs() return self._error if wait_until_finished: self._process.wait() output = self._out_file.read() if self.is_finished(): self._close_outputs() return output def _close_outputs(self): self._out_file.close() os.close(self._out_fd) self._remove_tempfile() def _remove_tempfile(self, attempts=10): try: os.remove(self._out_path) except OSError: if not attempts: raise time.sleep(1) self._remove_tempfile(attempts - 1)
26.097561
99
0.600467
__author__ = "苦叶子" import os import time import tempfile import subprocess class Process: def __init__(self, command): self._command = command self._process = None self._error = None self._out_file = None self._out_path = None self._out_fd = None print(command) def start(self): self._out_fd, self._out_path = tempfile.mkstemp(prefix='rfproc_', suffix='.txt', text=True) self._out_file = open(self._out_path) try: self._process = subprocess.Popen(self._command, stdout=self._out_fd, stderr=subprocess.STDOUT) except OSError as err: self._error = str(err) def is_finished(self): return self._error is not None or self._process.poll() is not None def stop(self): self._process.kill() def wait(self): if self._process is not None: self._process.wait() def get_output(self, wait_until_finished=False): if self._error: self._close_outputs() return self._error if wait_until_finished: self._process.wait() output = self._out_file.read() if self.is_finished(): self._close_outputs() return output def _close_outputs(self): self._out_file.close() os.close(self._out_fd) self._remove_tempfile() def _remove_tempfile(self, attempts=10): try: os.remove(self._out_path) except OSError: if not attempts: raise time.sleep(1) self._remove_tempfile(attempts - 1)
true
true
f703d3e86257aec1a99970afe7f847828a980f0d
8,382
py
Python
linnapi/requests/inventory.py
stcstores/linnapi
88cc1d73444f44b7d901dc1086790a300066ea6e
[ "MIT" ]
null
null
null
linnapi/requests/inventory.py
stcstores/linnapi
88cc1d73444f44b7d901dc1086790a300066ea6e
[ "MIT" ]
null
null
null
linnapi/requests/inventory.py
stcstores/linnapi
88cc1d73444f44b7d901dc1086790a300066ea6e
[ "MIT" ]
null
null
null
"""Inventory requests.""" from collections import defaultdict from typing import Any, MutableMapping import requests from linnapi.request import LinnworksAPIRequest class GetStockItemIDsBySKU(LinnworksAPIRequest): """Return the stock item ID for a SKU.""" URL = "https://eu-ext.linnworks.net/api/Inventory/GetStockItemIdsBySKU" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: """Return request JSON post data.""" skus: list[str] = kwargs["skus"] return {"request": {"SKUS": skus}} class GetStockLevel(LinnworksAPIRequest): """Return the current stock level for a product by stock item ID.""" URL = "https://eu-ext.linnworks.net/api/Stock/GetStockLevel" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: """Return request JSON post data.""" stock_item_id: str = kwargs["stock_item_id"] return {"stockItemId": stock_item_id} class GetStockLevelBatch(LinnworksAPIRequest): """Return the stock level for multiple products by stock item ID.""" URL = "https://eu-ext.linnworks.net/api/Stock/GetStockLevel_Batch" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: """Return request JSON post data.""" stock_item_ids: list[str] = kwargs["stock_item_ids"] return {"request": {"StockItemIDs": stock_item_ids}} class SetStockLevelBySKU(LinnworksAPIRequest): """Update the stock level for a product.""" URL = "https://eu-ext.linnworks.net/api/Stock/UpdateStockLevelsBySKU" METHOD = LinnworksAPIRequest.POST @classmethod def params(cls, *args: Any, **kwargs: Any) -> dict[str, Any]: """Return request URL parameters.""" return {"changeSource": str(kwargs["change_source"])} @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: """Return request JSON post data.""" location_id: str = kwargs["location_id"] changes: tuple[tuple[str, int]] = kwargs["changes"] stock_levels = 
[ {"SKU": str(sku), "LocationID": location_id, "Level": int(level)} for sku, level in changes ] return {"stockLevels": stock_levels} class AddImageToInventoryItem(LinnworksAPIRequest): """ Adds an image to a stock item. Either `item_number` or `stock_item_id` must be passed. Kwargs: image_url (str): The URL of the image to be added. item_number (str): The SKU of the product to add the image to. stock_item_id (str): The ID (GUID) of the product to add the image to. is_main (bool): Is the image the main image for the product. """ URL = "https://eu-ext.linnworks.net/api/Inventory/AddImageToInventoryItem" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: """Return request JSON post data.""" item_number: str = kwargs.get("item_number", "") stock_item_id: str = kwargs.get("stock_item_id", "") is_main: bool = kwargs["is_main"] image_url: str = kwargs["image_url"] request_data = { "IsMain": is_main, "ImageUrl": image_url, } if not item_number and not stock_item_id: raise ValueError("Either `stock_item_id` or `sku` must be passed.") if item_number: request_data["ItemNumber"] = item_number if stock_item_id: request_data["StockItemId"] = stock_item_id return {"request": request_data} class UpdateImages(LinnworksAPIRequest): """ Update properties on images. Kwargs: row_id (str): ID (GUID) of image, passed as "pkRowId". Required. stock_item_id (str): The ID (GUID) of the stock item to which the image belongs. Requred. is_main (bool): Set weather the image is the main image or not, passed as "IsMain". sort_order (int): The position of the image, passed as "SortOrder". 
""" URL = "https://eu-ext.linnworks.net/api/Inventory/UpdateImages" METHOD = LinnworksAPIRequest.POST @classmethod def item_json(cls, **kwargs: Any) -> dict[str, Any]: """Return request data for a single image.""" row_id = kwargs.get("row_id") is_main = kwargs.get("is_main") sort_order = kwargs.get("sort_order") checksum_value = kwargs.get("checksum_value") raw_checksum = kwargs.get("raw_checksum") stock_item_id = kwargs.get("stock_item_id") stock_item_int_id = kwargs.get("stock_item_id_int") image_data = { "pkRowId": row_id, "IsMain": is_main, "SortOrder": sort_order, "ChecksumValue": checksum_value, "RawChecksum": raw_checksum, "StockItemId": stock_item_id, "StockItemIntId": stock_item_int_id, } return {key: value for key, value in image_data.items() if value is not None} @classmethod def multi_json( cls, requests: list[MutableMapping[Any, Any]] ) -> dict[str, Any] | list[Any]: """Return request JSON with multiple updates.""" return {"images": [cls.item_json(**request) for request in requests]} @classmethod def parse_response( cls, response: requests.models.Response, *args: Any, **kwargs: Any ) -> str: """Parse the request response.""" return response.text class GetInventoryItemImages(LinnworksAPIRequest): """ Use this call to Get inventory item images. Args: inventory_item_id (str): The ID (GUID) of the stock item to retrive images for, passed as "InventoryItemId". """ URL = "https://eu-ext.linnworks.net/api/Inventory/GetInventoryItemImages" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: """Return request JSON post data.""" inventory_item_id = kwargs.get("inventory_item_id") return {"inventoryItemId": inventory_item_id} class DeleteImagesFromInventoryItem(LinnworksAPIRequest): """ Remove an image from an inventory item. Kwargs: image_id (str): ID (GUID) of image, passed as "pkRowId". Required. stock_item_id (str): The ID (GUID) of the stock item to which the image belongs. Requred. 
""" URL = "https://eu-ext.linnworks.net/api/Inventory/DeleteImagesFromInventoryItem" METHOD = LinnworksAPIRequest.POST @classmethod def item_json(cls, **kwargs: Any) -> dict[str, Any]: """Return request data for a single image.""" stock_item_id = kwargs["stock_item_id"] image_url = kwargs["image_url"] return {stock_item_id: [image_url]} @classmethod def multi_json( cls, requests: list[MutableMapping[Any, Any]] ) -> dict[str, Any] | list[Any]: """Return request JSON with multiple updates.""" stock_items = defaultdict(list) for request in requests: for key, images in cls.item_json(**request).items(): stock_items[key].extend(images) return {"inventoryItemImages": dict(stock_items)} @classmethod def parse_response( cls, response: requests.models.Response, *args: Any, **kwargs: Any ) -> str: """Parse the request response.""" return response.text class GetItemChangesHistory(LinnworksAPIRequest): """Get the stock change history for an item. Kwargs: """ URL = "https://eu-ext.linnworks.net/api/Stock/GetItemChangesHistory" METHOD = LinnworksAPIRequest.POST @classmethod def params(cls, *args: Any, **kwargs: Any) -> dict[str, Any]: """Return request JSON post data.""" stock_item_id = kwargs.get("stock_item_id") location_id = kwargs.get("location_id", "") entries_per_page = kwargs.get("entries_per_page", 500) page_number = kwargs.get("page_number", 1) return { "stockItemId": stock_item_id, "locationId": location_id, "entriesPerPage": entries_per_page, "pageNumber": page_number, }
34.780083
91
0.636722
from collections import defaultdict from typing import Any, MutableMapping import requests from linnapi.request import LinnworksAPIRequest class GetStockItemIDsBySKU(LinnworksAPIRequest): URL = "https://eu-ext.linnworks.net/api/Inventory/GetStockItemIdsBySKU" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: skus: list[str] = kwargs["skus"] return {"request": {"SKUS": skus}} class GetStockLevel(LinnworksAPIRequest): URL = "https://eu-ext.linnworks.net/api/Stock/GetStockLevel" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: stock_item_id: str = kwargs["stock_item_id"] return {"stockItemId": stock_item_id} class GetStockLevelBatch(LinnworksAPIRequest): URL = "https://eu-ext.linnworks.net/api/Stock/GetStockLevel_Batch" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: stock_item_ids: list[str] = kwargs["stock_item_ids"] return {"request": {"StockItemIDs": stock_item_ids}} class SetStockLevelBySKU(LinnworksAPIRequest): URL = "https://eu-ext.linnworks.net/api/Stock/UpdateStockLevelsBySKU" METHOD = LinnworksAPIRequest.POST @classmethod def params(cls, *args: Any, **kwargs: Any) -> dict[str, Any]: return {"changeSource": str(kwargs["change_source"])} @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: location_id: str = kwargs["location_id"] changes: tuple[tuple[str, int]] = kwargs["changes"] stock_levels = [ {"SKU": str(sku), "LocationID": location_id, "Level": int(level)} for sku, level in changes ] return {"stockLevels": stock_levels} class AddImageToInventoryItem(LinnworksAPIRequest): URL = "https://eu-ext.linnworks.net/api/Inventory/AddImageToInventoryItem" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: item_number: str = kwargs.get("item_number", "") 
stock_item_id: str = kwargs.get("stock_item_id", "") is_main: bool = kwargs["is_main"] image_url: str = kwargs["image_url"] request_data = { "IsMain": is_main, "ImageUrl": image_url, } if not item_number and not stock_item_id: raise ValueError("Either `stock_item_id` or `sku` must be passed.") if item_number: request_data["ItemNumber"] = item_number if stock_item_id: request_data["StockItemId"] = stock_item_id return {"request": request_data} class UpdateImages(LinnworksAPIRequest): URL = "https://eu-ext.linnworks.net/api/Inventory/UpdateImages" METHOD = LinnworksAPIRequest.POST @classmethod def item_json(cls, **kwargs: Any) -> dict[str, Any]: row_id = kwargs.get("row_id") is_main = kwargs.get("is_main") sort_order = kwargs.get("sort_order") checksum_value = kwargs.get("checksum_value") raw_checksum = kwargs.get("raw_checksum") stock_item_id = kwargs.get("stock_item_id") stock_item_int_id = kwargs.get("stock_item_id_int") image_data = { "pkRowId": row_id, "IsMain": is_main, "SortOrder": sort_order, "ChecksumValue": checksum_value, "RawChecksum": raw_checksum, "StockItemId": stock_item_id, "StockItemIntId": stock_item_int_id, } return {key: value for key, value in image_data.items() if value is not None} @classmethod def multi_json( cls, requests: list[MutableMapping[Any, Any]] ) -> dict[str, Any] | list[Any]: return {"images": [cls.item_json(**request) for request in requests]} @classmethod def parse_response( cls, response: requests.models.Response, *args: Any, **kwargs: Any ) -> str: return response.text class GetInventoryItemImages(LinnworksAPIRequest): URL = "https://eu-ext.linnworks.net/api/Inventory/GetInventoryItemImages" METHOD = LinnworksAPIRequest.POST @classmethod def json(cls, *args: Any, **kwargs: Any) -> dict[str, Any] | list[Any]: inventory_item_id = kwargs.get("inventory_item_id") return {"inventoryItemId": inventory_item_id} class DeleteImagesFromInventoryItem(LinnworksAPIRequest): URL = 
"https://eu-ext.linnworks.net/api/Inventory/DeleteImagesFromInventoryItem" METHOD = LinnworksAPIRequest.POST @classmethod def item_json(cls, **kwargs: Any) -> dict[str, Any]: stock_item_id = kwargs["stock_item_id"] image_url = kwargs["image_url"] return {stock_item_id: [image_url]} @classmethod def multi_json( cls, requests: list[MutableMapping[Any, Any]] ) -> dict[str, Any] | list[Any]: stock_items = defaultdict(list) for request in requests: for key, images in cls.item_json(**request).items(): stock_items[key].extend(images) return {"inventoryItemImages": dict(stock_items)} @classmethod def parse_response( cls, response: requests.models.Response, *args: Any, **kwargs: Any ) -> str: return response.text class GetItemChangesHistory(LinnworksAPIRequest): URL = "https://eu-ext.linnworks.net/api/Stock/GetItemChangesHistory" METHOD = LinnworksAPIRequest.POST @classmethod def params(cls, *args: Any, **kwargs: Any) -> dict[str, Any]: stock_item_id = kwargs.get("stock_item_id") location_id = kwargs.get("location_id", "") entries_per_page = kwargs.get("entries_per_page", 500) page_number = kwargs.get("page_number", 1) return { "stockItemId": stock_item_id, "locationId": location_id, "entriesPerPage": entries_per_page, "pageNumber": page_number, }
true
true
f703d55dccf3102a6d3ac6083df3373cec870017
9,334
py
Python
www/Webserver.py
william-stearns/E_ink_dashboard
1625b213baef336833497c4593157485cfffdad4
[ "MIT" ]
67
2020-10-06T11:03:51.000Z
2022-02-14T23:07:45.000Z
www/Webserver.py
william-stearns/E_ink_dashboard
1625b213baef336833497c4593157485cfffdad4
[ "MIT" ]
11
2020-11-20T17:44:36.000Z
2021-03-28T19:10:34.000Z
www/Webserver.py
william-stearns/E_ink_dashboard
1625b213baef336833497c4593157485cfffdad4
[ "MIT" ]
9
2020-10-06T15:10:47.000Z
2021-07-12T14:14:51.000Z
from flask import Flask, render_template, request from dashboard_forms import Dashform #import create_pickle as p_j import json import os app = Flask(__name__) app.secret_key = 'dash_flask_key' creddir = os.path.join(os.path.dirname( os.path.dirname(os.path.realpath(__file__))), 'credentials/dash_id.json') # creddir_2 = os.path.join(os.path.dirname( # os.path.dirname(os.path.realpath(__file__))), 'credentials') tempdir = os.path.join(os.path.dirname( os.path.dirname(os.path.realpath(__file__))), 'www/templates/dash_id_template.json') def Convert(string): li = list(string.split(",")) k = [] for i in li: str(i).replace(' ', '') k.append(i) return k def formatting(string): string = string.replace("[", "") string = string.replace("]", "") string = string.replace("'", "") string = string.replace(" ", "") return string def json_exists(file_name): return os.path.exists(file_name) def getinfo(): data = [] if json_exists(creddir): with open(creddir, "r") as rdash_id: data = json.load(rdash_id) return data else: with open(tempdir, "r") as f1, open(creddir, "w+") as f2: f2.write(f1.read()) f2.close with open(creddir, "r") as rdash_id: data = json.load(rdash_id) return data def save_json(res): with open(creddir, 'r') as f: data = json.load(f) data["Transit"]["T_URL"] = res["T_URL"] data["Transit"]["T_API_KEY"] = res["T_API_KEY"] data["Transit"]["Stops"] = Convert(res["Stops"]) data["Transit"]["T_BUS"] = res["T_BUS"] data["Transit"]["T_BUS_TIME"] = res["T_BUS_TIME"] data["Weather"]["W_URL"] = res["W_URL"] data["Weather"]["UNITS"] = res["UNITS"] data["Weather"]["W_API_KEY"] = res["W_API_KEY"] data["Geolocation"]["G_URL"] = res["G_URL"] data["Geolocation"]["G_API_KEY"] = res["G_API_KEY"] data["Currency"]["C_URL_1"] = res["C_URL_1"] data["Currency"]["C_API_KEY_1"] = res["C_API_KEY_1"] data["Currency"]["C_URL_3"] = res["C_URL_3"] data["Currency"]["C_URL_4"] = res["C_URL_4"] data["Currency"]["CURR_CHECK"] = Convert(res["CURR_CHECK"]) data["Stocks"]["STOCK_W_URL"] = 
res["STOCK_W_URL"] data["Stocks"]["STOCK_WE_URL"] = res["STOCK_WE_URL"] data["Stocks"]["STOCK_API"] = res["STOCK_API"] data["Stocks"]["STOCK_CHECK"] = Convert(res["STOCK_CHECK"]) data["Tasklist"]["gsheet_json"] = res["gsheet_json"] data["Tasklist"]["sheetname"] = res["sheetname"] data["G_Meetings"]["CREDENTIALS_FILE"] = res["CREDENTIALS_FILE"] data["News"]["NEWS_URL"] = res["NEWS_URL"] data["News"]["NEWS_API"] = res["NEWS_API"] data["News"]["NEWS_SOURCES"] = str(res["NEWS_SOURCES"]).replace(' ', '') data["System"]["waking_time"] = res["waking_time"] data["System"]["sleeping_time"] = res["sleeping_time"] data["System"]["mod_1_choice"] = res["mod_1_choice"] data["System"]["mod_2_choice"] = res["mod_2_choice"] data["System"]["mod_3_choice"] = res["mod_3_choice"] data["System"]["mod_4_choice"] = res["mod_4_choice"] data["System"]["refresh_time"] = res["refresh_time"] data["System"]["awake"] = res["awake"] os.remove(creddir) with open(creddir, 'w+') as f: json.dump(data, f, indent=4) @ app.route('/', methods=['POST', 'GET']) def login(): form = Dashform() d_data = getinfo() form.res_msg.label = "" if request.method == 'POST': form.res_msg.label = "" if request.form['btn'] == 'Submit': results = request.form save_json(results) form.res_msg.label = "Information saved successfully" '''elif request.form['btn'] == 'Generate Pickle File': results = request.form p_j.get_calendar_service(results["CREDENTIALS_FILE"], creddir_2) ''' d_data = getinfo() form.T_URL.data = str(d_data["Transit"]["T_URL"]) form.T_API_KEY.data = str(d_data["Transit"]["T_API_KEY"]) form.Stops.data = formatting(str(d_data["Transit"]["Stops"])) form.T_BUS.data = str(d_data["Transit"]["T_BUS"]) form.T_BUS_TIME.data = str(d_data["Transit"]["T_BUS_TIME"]) form.W_URL.data = str(d_data["Weather"]["W_URL"]) form.W_API_KEY.data = str(d_data["Weather"]["W_API_KEY"]) form.UNITS.data = str(d_data["Weather"]["UNITS"]) form.C_URL_1.data = str(d_data["Currency"]["C_URL_1"]) form.C_API_KEY_1.data = 
str(d_data["Currency"]["C_API_KEY_1"]) form.C_URL_3.data = str(d_data["Currency"]["C_URL_3"]) form.C_URL_4.data = str(d_data["Currency"]["C_URL_4"]) form.CURR_CHECK.data = formatting(str(d_data["Currency"]["CURR_CHECK"])) form.STOCK_W_URL.data = str(d_data["Stocks"]["STOCK_W_URL"]) form.STOCK_WE_URL.data = str(d_data["Stocks"]["STOCK_WE_URL"]) form.STOCK_API.data = str(d_data["Stocks"]["STOCK_API"]) form.STOCK_CHECK.data = formatting(str(d_data["Stocks"]["STOCK_CHECK"])) form.G_URL.data = str(d_data["Geolocation"]["G_URL"]) form.G_API_KEY.data = str(d_data["Geolocation"]["G_API_KEY"]) form.gsheet_json.data = str(d_data["Tasklist"]["gsheet_json"]) form.sheetname.data = str(d_data["Tasklist"]["sheetname"]) form.CREDENTIALS_FILE.data = str(d_data["G_Meetings"]["CREDENTIALS_FILE"]) form.NEWS_URL.data = str(d_data["News"]["NEWS_URL"]) form.NEWS_API.data = str(d_data["News"]["NEWS_API"]) form.NEWS_SOURCES.data = formatting(str(d_data["News"]["NEWS_SOURCES"])) form.waking_time.data = str(d_data["System"]["waking_time"]) form.sleeping_time.data = str(d_data["System"]["sleeping_time"]) form.mod_1_choice.data = str(d_data["System"]["mod_1_choice"]) form.mod_2_choice.data = str(d_data["System"]["mod_2_choice"]) form.mod_3_choice.data = str(d_data["System"]["mod_3_choice"]) form.mod_4_choice.data = str(d_data["System"]["mod_4_choice"]) form.refresh_time.data = str(d_data["System"]["refresh_time"]) form.awake.data = str(d_data["System"]["awake"]) return render_template('Settings.html', form=form) elif request.method == 'GET': # populate the form on start d_data = getinfo() form.res_msg.label = "" form.T_URL.data = str(d_data["Transit"]["T_URL"]) form.T_API_KEY.data = str(d_data["Transit"]["T_API_KEY"]) form.Stops.data = formatting(str(d_data["Transit"]["Stops"])) form.T_BUS.data = str(d_data["Transit"]["T_BUS"]) form.T_BUS_TIME.data = str(d_data["Transit"]["T_BUS_TIME"]) form.W_URL.data = str(d_data["Weather"]["W_URL"]) form.W_API_KEY.data = str(d_data["Weather"]["W_API_KEY"]) 
form.UNITS.data = str(d_data["Weather"]["UNITS"]) form.C_URL_1.data = str(d_data["Currency"]["C_URL_1"]) form.C_API_KEY_1.data = str(d_data["Currency"]["C_API_KEY_1"]) form.C_URL_3.data = str(d_data["Currency"]["C_URL_3"]) form.C_URL_4.data = str(d_data["Currency"]["C_URL_4"]) form.CURR_CHECK.data = formatting(str(d_data["Currency"]["CURR_CHECK"])) form.STOCK_W_URL.data = str(d_data["Stocks"]["STOCK_W_URL"]) form.STOCK_WE_URL.data = str(d_data["Stocks"]["STOCK_WE_URL"]) form.STOCK_API.data = str(d_data["Stocks"]["STOCK_API"]) form.STOCK_CHECK.data = formatting(str(d_data["Stocks"]["STOCK_CHECK"])) form.G_URL.data = str(d_data["Geolocation"]["G_URL"]) form.G_API_KEY.data = str(d_data["Geolocation"]["G_API_KEY"]) form.gsheet_json.data = str(d_data["Tasklist"]["gsheet_json"]) form.sheetname.data = str(d_data["Tasklist"]["sheetname"]) form.CREDENTIALS_FILE.data = str(d_data["G_Meetings"]["CREDENTIALS_FILE"]) form.NEWS_URL.data = str(d_data["News"]["NEWS_URL"]) form.NEWS_API.data = str(d_data["News"]["NEWS_API"]) form.NEWS_SOURCES.data = formatting(str(d_data["News"]["NEWS_SOURCES"])) form.waking_time.data = str(d_data["System"]["waking_time"]) form.sleeping_time.data = str(d_data["System"]["sleeping_time"]) form.mod_1_choice.data = str(d_data["System"]["mod_1_choice"]) form.mod_2_choice.data = str(d_data["System"]["mod_2_choice"]) form.mod_3_choice.data = str(d_data["System"]["mod_3_choice"]) form.mod_4_choice.data = str(d_data["System"]["mod_4_choice"]) form.refresh_time.data = str(d_data["System"]["refresh_time"]) form.awake.data = str(d_data["System"]["awake"]) return render_template('Settings.html', form=form) def shutdown_server(): func = request.environ.get('werkzeug.server.shutdown') if func is None: raise RuntimeError('Not running with the Werkzeug Server') func() @ app.route('/shutdown', methods=['GET']) def shutdown(): shutdown_server() return 'Server shutting down...' if __name__ == '__main__': app.run(host='0.0.0.0')
43.821596
89
0.604885
from flask import Flask, render_template, request from dashboard_forms import Dashform import json import os app = Flask(__name__) app.secret_key = 'dash_flask_key' creddir = os.path.join(os.path.dirname( os.path.dirname(os.path.realpath(__file__))), 'credentials/dash_id.json') tempdir = os.path.join(os.path.dirname( os.path.dirname(os.path.realpath(__file__))), 'www/templates/dash_id_template.json') def Convert(string): li = list(string.split(",")) k = [] for i in li: str(i).replace(' ', '') k.append(i) return k def formatting(string): string = string.replace("[", "") string = string.replace("]", "") string = string.replace("'", "") string = string.replace(" ", "") return string def json_exists(file_name): return os.path.exists(file_name) def getinfo(): data = [] if json_exists(creddir): with open(creddir, "r") as rdash_id: data = json.load(rdash_id) return data else: with open(tempdir, "r") as f1, open(creddir, "w+") as f2: f2.write(f1.read()) f2.close with open(creddir, "r") as rdash_id: data = json.load(rdash_id) return data def save_json(res): with open(creddir, 'r') as f: data = json.load(f) data["Transit"]["T_URL"] = res["T_URL"] data["Transit"]["T_API_KEY"] = res["T_API_KEY"] data["Transit"]["Stops"] = Convert(res["Stops"]) data["Transit"]["T_BUS"] = res["T_BUS"] data["Transit"]["T_BUS_TIME"] = res["T_BUS_TIME"] data["Weather"]["W_URL"] = res["W_URL"] data["Weather"]["UNITS"] = res["UNITS"] data["Weather"]["W_API_KEY"] = res["W_API_KEY"] data["Geolocation"]["G_URL"] = res["G_URL"] data["Geolocation"]["G_API_KEY"] = res["G_API_KEY"] data["Currency"]["C_URL_1"] = res["C_URL_1"] data["Currency"]["C_API_KEY_1"] = res["C_API_KEY_1"] data["Currency"]["C_URL_3"] = res["C_URL_3"] data["Currency"]["C_URL_4"] = res["C_URL_4"] data["Currency"]["CURR_CHECK"] = Convert(res["CURR_CHECK"]) data["Stocks"]["STOCK_W_URL"] = res["STOCK_W_URL"] data["Stocks"]["STOCK_WE_URL"] = res["STOCK_WE_URL"] data["Stocks"]["STOCK_API"] = res["STOCK_API"] data["Stocks"]["STOCK_CHECK"] = 
Convert(res["STOCK_CHECK"]) data["Tasklist"]["gsheet_json"] = res["gsheet_json"] data["Tasklist"]["sheetname"] = res["sheetname"] data["G_Meetings"]["CREDENTIALS_FILE"] = res["CREDENTIALS_FILE"] data["News"]["NEWS_URL"] = res["NEWS_URL"] data["News"]["NEWS_API"] = res["NEWS_API"] data["News"]["NEWS_SOURCES"] = str(res["NEWS_SOURCES"]).replace(' ', '') data["System"]["waking_time"] = res["waking_time"] data["System"]["sleeping_time"] = res["sleeping_time"] data["System"]["mod_1_choice"] = res["mod_1_choice"] data["System"]["mod_2_choice"] = res["mod_2_choice"] data["System"]["mod_3_choice"] = res["mod_3_choice"] data["System"]["mod_4_choice"] = res["mod_4_choice"] data["System"]["refresh_time"] = res["refresh_time"] data["System"]["awake"] = res["awake"] os.remove(creddir) with open(creddir, 'w+') as f: json.dump(data, f, indent=4) @ app.route('/', methods=['POST', 'GET']) def login(): form = Dashform() d_data = getinfo() form.res_msg.label = "" if request.method == 'POST': form.res_msg.label = "" if request.form['btn'] == 'Submit': results = request.form save_json(results) form.res_msg.label = "Information saved successfully" d_data = getinfo() form.T_URL.data = str(d_data["Transit"]["T_URL"]) form.T_API_KEY.data = str(d_data["Transit"]["T_API_KEY"]) form.Stops.data = formatting(str(d_data["Transit"]["Stops"])) form.T_BUS.data = str(d_data["Transit"]["T_BUS"]) form.T_BUS_TIME.data = str(d_data["Transit"]["T_BUS_TIME"]) form.W_URL.data = str(d_data["Weather"]["W_URL"]) form.W_API_KEY.data = str(d_data["Weather"]["W_API_KEY"]) form.UNITS.data = str(d_data["Weather"]["UNITS"]) form.C_URL_1.data = str(d_data["Currency"]["C_URL_1"]) form.C_API_KEY_1.data = str(d_data["Currency"]["C_API_KEY_1"]) form.C_URL_3.data = str(d_data["Currency"]["C_URL_3"]) form.C_URL_4.data = str(d_data["Currency"]["C_URL_4"]) form.CURR_CHECK.data = formatting(str(d_data["Currency"]["CURR_CHECK"])) form.STOCK_W_URL.data = str(d_data["Stocks"]["STOCK_W_URL"]) form.STOCK_WE_URL.data = 
str(d_data["Stocks"]["STOCK_WE_URL"]) form.STOCK_API.data = str(d_data["Stocks"]["STOCK_API"]) form.STOCK_CHECK.data = formatting(str(d_data["Stocks"]["STOCK_CHECK"])) form.G_URL.data = str(d_data["Geolocation"]["G_URL"]) form.G_API_KEY.data = str(d_data["Geolocation"]["G_API_KEY"]) form.gsheet_json.data = str(d_data["Tasklist"]["gsheet_json"]) form.sheetname.data = str(d_data["Tasklist"]["sheetname"]) form.CREDENTIALS_FILE.data = str(d_data["G_Meetings"]["CREDENTIALS_FILE"]) form.NEWS_URL.data = str(d_data["News"]["NEWS_URL"]) form.NEWS_API.data = str(d_data["News"]["NEWS_API"]) form.NEWS_SOURCES.data = formatting(str(d_data["News"]["NEWS_SOURCES"])) form.waking_time.data = str(d_data["System"]["waking_time"]) form.sleeping_time.data = str(d_data["System"]["sleeping_time"]) form.mod_1_choice.data = str(d_data["System"]["mod_1_choice"]) form.mod_2_choice.data = str(d_data["System"]["mod_2_choice"]) form.mod_3_choice.data = str(d_data["System"]["mod_3_choice"]) form.mod_4_choice.data = str(d_data["System"]["mod_4_choice"]) form.refresh_time.data = str(d_data["System"]["refresh_time"]) form.awake.data = str(d_data["System"]["awake"]) return render_template('Settings.html', form=form) elif request.method == 'GET': # populate the form on start d_data = getinfo() form.res_msg.label = "" form.T_URL.data = str(d_data["Transit"]["T_URL"]) form.T_API_KEY.data = str(d_data["Transit"]["T_API_KEY"]) form.Stops.data = formatting(str(d_data["Transit"]["Stops"])) form.T_BUS.data = str(d_data["Transit"]["T_BUS"]) form.T_BUS_TIME.data = str(d_data["Transit"]["T_BUS_TIME"]) form.W_URL.data = str(d_data["Weather"]["W_URL"]) form.W_API_KEY.data = str(d_data["Weather"]["W_API_KEY"]) form.UNITS.data = str(d_data["Weather"]["UNITS"]) form.C_URL_1.data = str(d_data["Currency"]["C_URL_1"]) form.C_API_KEY_1.data = str(d_data["Currency"]["C_API_KEY_1"]) form.C_URL_3.data = str(d_data["Currency"]["C_URL_3"]) form.C_URL_4.data = str(d_data["Currency"]["C_URL_4"]) form.CURR_CHECK.data = 
formatting(str(d_data["Currency"]["CURR_CHECK"])) form.STOCK_W_URL.data = str(d_data["Stocks"]["STOCK_W_URL"]) form.STOCK_WE_URL.data = str(d_data["Stocks"]["STOCK_WE_URL"]) form.STOCK_API.data = str(d_data["Stocks"]["STOCK_API"]) form.STOCK_CHECK.data = formatting(str(d_data["Stocks"]["STOCK_CHECK"])) form.G_URL.data = str(d_data["Geolocation"]["G_URL"]) form.G_API_KEY.data = str(d_data["Geolocation"]["G_API_KEY"]) form.gsheet_json.data = str(d_data["Tasklist"]["gsheet_json"]) form.sheetname.data = str(d_data["Tasklist"]["sheetname"]) form.CREDENTIALS_FILE.data = str(d_data["G_Meetings"]["CREDENTIALS_FILE"]) form.NEWS_URL.data = str(d_data["News"]["NEWS_URL"]) form.NEWS_API.data = str(d_data["News"]["NEWS_API"]) form.NEWS_SOURCES.data = formatting(str(d_data["News"]["NEWS_SOURCES"])) form.waking_time.data = str(d_data["System"]["waking_time"]) form.sleeping_time.data = str(d_data["System"]["sleeping_time"]) form.mod_1_choice.data = str(d_data["System"]["mod_1_choice"]) form.mod_2_choice.data = str(d_data["System"]["mod_2_choice"]) form.mod_3_choice.data = str(d_data["System"]["mod_3_choice"]) form.mod_4_choice.data = str(d_data["System"]["mod_4_choice"]) form.refresh_time.data = str(d_data["System"]["refresh_time"]) form.awake.data = str(d_data["System"]["awake"]) return render_template('Settings.html', form=form) def shutdown_server(): func = request.environ.get('werkzeug.server.shutdown') if func is None: raise RuntimeError('Not running with the Werkzeug Server') func() @ app.route('/shutdown', methods=['GET']) def shutdown(): shutdown_server() return 'Server shutting down...' if __name__ == '__main__': app.run(host='0.0.0.0')
true
true
f703d5bcb5149ae9902f6dabfb5129e5c54b227c
548
py
Python
Thesis@3.9.1/Lib/site-packages/django/db/backends/base/client.py
nverbois/TFE21-232
7113837b5263b5c508bfc6903cb6982b48aa7ee4
[ "MIT" ]
null
null
null
Thesis@3.9.1/Lib/site-packages/django/db/backends/base/client.py
nverbois/TFE21-232
7113837b5263b5c508bfc6903cb6982b48aa7ee4
[ "MIT" ]
null
null
null
Thesis@3.9.1/Lib/site-packages/django/db/backends/base/client.py
nverbois/TFE21-232
7113837b5263b5c508bfc6903cb6982b48aa7ee4
[ "MIT" ]
null
null
null
class BaseDatabaseClient: """Encapsulate backend-specific methods for opening a client shell.""" # This should be a string representing the name of the executable # (e.g., "psql"). Subclasses must override this. executable_name = None def __init__(self, connection): # connection is an instance of BaseDatabaseWrapper. self.connection = connection def runshell(self, parameters): raise NotImplementedError( "subclasses of BaseDatabaseClient must provide a runshell() method" )
34.25
79
0.691606
class BaseDatabaseClient: executable_name = None def __init__(self, connection): self.connection = connection def runshell(self, parameters): raise NotImplementedError( "subclasses of BaseDatabaseClient must provide a runshell() method" )
true
true
f703d886c8b787f5722852a6e7cd7aa2d38d2ecf
2,072
py
Python
tests/abstractions/test_observer.py
symuvia/symupy
e6604c59bb4474f594ef5c997508f0407c9b3870
[ "MIT" ]
2
2019-07-01T09:58:53.000Z
2020-06-12T12:12:46.000Z
tests/abstractions/test_observer.py
licit-lab/symupy
942a17ee78cd12a363a4cd7b7f8363e239ccf7fe
[ "MIT" ]
33
2021-01-18T13:59:01.000Z
2021-11-29T13:21:10.000Z
tests/abstractions/test_observer.py
licit-lab/symupy
942a17ee78cd12a363a4cd7b7f8363e239ccf7fe
[ "MIT" ]
7
2018-07-12T13:34:38.000Z
2019-10-02T13:37:31.000Z
""" Unit tests for symupy.api.stream """ # ============================================================================ # STANDARD IMPORTS # ============================================================================ import pytest # ============================================================================ # INTERNAL IMPORTS # ============================================================================ from symupy.runtime.logic.publisher import Publisher from symupy.runtime.logic.subscriber import Subscriber # ============================================================================ # TESTS AND DEFINITIONS # ============================================================================ @pytest.fixture def default_channel(): return ("default",) @pytest.fixture def channels(): return ("channel 1", "channel 2") def test_default_constructor(default_channel): p = Publisher() assert p.channels == default_channel def test_default_attach_observer(default_channel): p = Publisher() s = Subscriber(p) assert p.channels == default_channel assert p._channels[default_channel[0]][s] == s.update def test_constructor_channels(channels): p = Publisher(channels) assert p.channels == channels def test_attach_observer(channels): p = Publisher(channels) s = Subscriber(p, channels[0]) assert p.channels == channels assert p._channels[channels[0]][s] == s.update def test_attach_detach_observer(channels): p = Publisher(channels) s = Subscriber(p, channels[0]) assert p._channels[channels[0]][s] == s.update def test_context_publisher(channels): with Publisher(channels) as p: s1 = Subscriber(p, channels[0]) s2 = Subscriber(p, channels[0]) p.dispatch(channels[0]) assert s1._call == 1 assert s2._call == 1 def test_context_observer(channels): with Publisher(channels) as p: with Subscriber(p, channels[0]), Subscriber(p, channels[1]): p.dispatch(channels[0]) def test_context_dispatch(channels): pass
25.9
78
0.533784
import pytest from symupy.runtime.logic.publisher import Publisher from symupy.runtime.logic.subscriber import Subscriber @pytest.fixture def default_channel(): return ("default",) @pytest.fixture def channels(): return ("channel 1", "channel 2") def test_default_constructor(default_channel): p = Publisher() assert p.channels == default_channel def test_default_attach_observer(default_channel): p = Publisher() s = Subscriber(p) assert p.channels == default_channel assert p._channels[default_channel[0]][s] == s.update def test_constructor_channels(channels): p = Publisher(channels) assert p.channels == channels def test_attach_observer(channels): p = Publisher(channels) s = Subscriber(p, channels[0]) assert p.channels == channels assert p._channels[channels[0]][s] == s.update def test_attach_detach_observer(channels): p = Publisher(channels) s = Subscriber(p, channels[0]) assert p._channels[channels[0]][s] == s.update def test_context_publisher(channels): with Publisher(channels) as p: s1 = Subscriber(p, channels[0]) s2 = Subscriber(p, channels[0]) p.dispatch(channels[0]) assert s1._call == 1 assert s2._call == 1 def test_context_observer(channels): with Publisher(channels) as p: with Subscriber(p, channels[0]), Subscriber(p, channels[1]): p.dispatch(channels[0]) def test_context_dispatch(channels): pass
true
true
f703d887e5f0d59a66cda0527be6937a58680515
1,666
gyp
Python
ion/image/tests/image_tests.gyp
RobLoach/ion
9e659416fb04bb3d3a67df1e018d7c2ccab9d468
[ "Apache-2.0" ]
null
null
null
ion/image/tests/image_tests.gyp
RobLoach/ion
9e659416fb04bb3d3a67df1e018d7c2ccab9d468
[ "Apache-2.0" ]
null
null
null
ion/image/tests/image_tests.gyp
RobLoach/ion
9e659416fb04bb3d3a67df1e018d7c2ccab9d468
[ "Apache-2.0" ]
null
null
null
# # Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # { 'includes': [ '../../common.gypi', ], 'targets': [ { 'target_name': 'ionimage_test', 'includes': [ '../../dev/test_target.gypi' ], 'sources' : [ 'conversionutils_test.cc', 'ninepatch_test.cc', 'renderutils_test.cc', ], 'dependencies' : [ 'image_tests_assets', '<(ion_dir)/image/image.gyp:ionimage_for_tests', '<(ion_dir)/base/base.gyp:ionbase_for_tests', '<(ion_dir)/external/gtest.gyp:iongtest_safeallocs', '<(ion_dir)/port/port.gyp:ionport', '<(ion_dir)/gfx/gfx.gyp:iongfx_for_tests', '<(ion_dir)/gfxutils/gfxutils.gyp:iongfxutils_for_tests', '<(ion_dir)/portgfx/portgfx.gyp:ionportgfx_for_tests', ], }, { 'target_name': 'image_tests_assets', 'type': 'static_library', 'includes': [ '../../dev/zipasset_generator.gypi', ], 'sources' : [ 'data/images.iad', ], 'dependencies' : [ '<(ion_dir)/base/base.gyp:ionbase_for_tests', ], }, ], }
28.724138
74
0.620048
{ 'includes': [ '../../common.gypi', ], 'targets': [ { 'target_name': 'ionimage_test', 'includes': [ '../../dev/test_target.gypi' ], 'sources' : [ 'conversionutils_test.cc', 'ninepatch_test.cc', 'renderutils_test.cc', ], 'dependencies' : [ 'image_tests_assets', '<(ion_dir)/image/image.gyp:ionimage_for_tests', '<(ion_dir)/base/base.gyp:ionbase_for_tests', '<(ion_dir)/external/gtest.gyp:iongtest_safeallocs', '<(ion_dir)/port/port.gyp:ionport', '<(ion_dir)/gfx/gfx.gyp:iongfx_for_tests', '<(ion_dir)/gfxutils/gfxutils.gyp:iongfxutils_for_tests', '<(ion_dir)/portgfx/portgfx.gyp:ionportgfx_for_tests', ], }, { 'target_name': 'image_tests_assets', 'type': 'static_library', 'includes': [ '../../dev/zipasset_generator.gypi', ], 'sources' : [ 'data/images.iad', ], 'dependencies' : [ '<(ion_dir)/base/base.gyp:ionbase_for_tests', ], }, ], }
true
true
f703d8ac1afa839366c06b687eddbcf3c2cbd0eb
50
py
Python
test/SIM_test_ip/Modified_data/nested_includes.py
iamthad/trick
88ac5b5990228e42a653347c9d7a103acea4d137
[ "NASA-1.3" ]
647
2015-05-07T16:08:16.000Z
2022-03-30T02:33:21.000Z
test/SIM_test_ip/Modified_data/nested_includes.py
tanglemontree/trick
f182c723495185708434e67789457eb29d52ad58
[ "NASA-1.3" ]
995
2015-04-30T19:44:31.000Z
2022-03-31T20:14:44.000Z
test/SIM_test_ip/Modified_data/nested_includes.py
tanglemontree/trick
f182c723495185708434e67789457eb29d52ad58
[ "NASA-1.3" ]
251
2015-05-15T09:24:34.000Z
2022-03-22T20:39:05.000Z
exec(open("Modified_data/next_level.py").read())
16.666667
48
0.74
exec(open("Modified_data/next_level.py").read())
true
true
f703d8cbc3d8b80905b75b7ed799f9b2c558bc79
101
py
Python
stlearn/tools/microenv/cci/__init__.py
duypham2108/dev_st
47adcfa5803eba7549b1185ec69d2317b386d9ff
[ "BSD-3-Clause" ]
67
2020-06-01T05:19:23.000Z
2022-03-31T20:47:50.000Z
stlearn/tools/microenv/cci/__init__.py
duypham2108/dev_st
47adcfa5803eba7549b1185ec69d2317b386d9ff
[ "BSD-3-Clause" ]
34
2020-11-02T18:01:43.000Z
2022-03-16T21:58:54.000Z
stlearn/tools/microenv/cci/__init__.py
duypham2108/dev_st
47adcfa5803eba7549b1185ec69d2317b386d9ff
[ "BSD-3-Clause" ]
13
2020-05-14T05:10:22.000Z
2022-03-09T14:05:38.000Z
from .base import lr from . import het from .merge import merge from .permutation import permutation
20.2
36
0.80198
from .base import lr from . import het from .merge import merge from .permutation import permutation
true
true
f703da02f25d5ed4cc390c9c56ecc410d25fc0b9
2,488
py
Python
events/tracon8/management/commands/setup_tracon8.py
jlaunonen/turska
fc6ec4e0ae50a823e931152ce8835098b96f5966
[ "CC-BY-3.0" ]
null
null
null
events/tracon8/management/commands/setup_tracon8.py
jlaunonen/turska
fc6ec4e0ae50a823e931152ce8835098b96f5966
[ "CC-BY-3.0" ]
null
null
null
events/tracon8/management/commands/setup_tracon8.py
jlaunonen/turska
fc6ec4e0ae50a823e931152ce8835098b96f5966
[ "CC-BY-3.0" ]
null
null
null
# encoding: utf-8 from datetime import datetime, timedelta from django.conf import settings from django.contrib.auth.models import Group from django.core.management.base import BaseCommand from dateutil.tz import tzlocal from core.models import Event, Venue from programme.models import ProgrammeEventMeta, TimeBlock, SpecialStartTime class Command(BaseCommand): args = '' help = 'Setup tracon8 specific stuff' def handle(*args, **options): tz = tzlocal() venue, unused = Venue.objects.get_or_create(name='Tampere-talo') event, unused = Event.objects.get_or_create(slug='tracon8', defaults=dict( name='Tracon 8', name_genitive='Tracon 8 -tapahtuman', name_illative='Tracon 8 -tapahtumaan', name_inessive='Tracon 8 -tapahtumassa', homepage_url='http://2013.tracon.fi', organization_name='Tracon ry', organization_url='http://ry.tracon.fi', start_time=datetime(2013, 9, 14, 10, 0, tzinfo=tz), end_time=datetime(2013, 9, 15, 18, 0, tzinfo=tz), venue=venue, )) admin_group_name = "{installation_name}-{event_slug}-programme-admins".format( installation_name=settings.KOMPASSI_INSTALLATION_SLUG, event_slug=event.slug, ) admin_group, unused = Group.objects.get_or_create(name=admin_group_name) programme_event_meta, unused = ProgrammeEventMeta.objects.get_or_create(event=event, defaults=dict( public=True, admin_group=admin_group )) # v5 if not programme_event_meta.contact_email: programme_event_meta.contact_email = 'ohjelma@tracon.fi' programme_event_meta.save() # v6 for start_time, end_time in [ ( datetime(2013, 9, 14, 11, 0, 0, tzinfo=tz), datetime(2013, 9, 15, 1 , 0, 0, tzinfo=tz) ), ( datetime(2013, 9, 15, 9 , 0, 0, tzinfo=tz), datetime(2013, 9, 15, 17, 0, 0, tzinfo=tz) ) ]: TimeBlock.objects.get_or_create( event=event, start_time=start_time, defaults=dict( end_time=end_time ) ) SpecialStartTime.objects.get_or_create( event=event, start_time=datetime(2013, 9, 14, 10, 30, 0, tzinfo=tz), )
33.621622
107
0.592042
from datetime import datetime, timedelta from django.conf import settings from django.contrib.auth.models import Group from django.core.management.base import BaseCommand from dateutil.tz import tzlocal from core.models import Event, Venue from programme.models import ProgrammeEventMeta, TimeBlock, SpecialStartTime class Command(BaseCommand): args = '' help = 'Setup tracon8 specific stuff' def handle(*args, **options): tz = tzlocal() venue, unused = Venue.objects.get_or_create(name='Tampere-talo') event, unused = Event.objects.get_or_create(slug='tracon8', defaults=dict( name='Tracon 8', name_genitive='Tracon 8 -tapahtuman', name_illative='Tracon 8 -tapahtumaan', name_inessive='Tracon 8 -tapahtumassa', homepage_url='http://2013.tracon.fi', organization_name='Tracon ry', organization_url='http://ry.tracon.fi', start_time=datetime(2013, 9, 14, 10, 0, tzinfo=tz), end_time=datetime(2013, 9, 15, 18, 0, tzinfo=tz), venue=venue, )) admin_group_name = "{installation_name}-{event_slug}-programme-admins".format( installation_name=settings.KOMPASSI_INSTALLATION_SLUG, event_slug=event.slug, ) admin_group, unused = Group.objects.get_or_create(name=admin_group_name) programme_event_meta, unused = ProgrammeEventMeta.objects.get_or_create(event=event, defaults=dict( public=True, admin_group=admin_group )) if not programme_event_meta.contact_email: programme_event_meta.contact_email = 'ohjelma@tracon.fi' programme_event_meta.save() for start_time, end_time in [ ( datetime(2013, 9, 14, 11, 0, 0, tzinfo=tz), datetime(2013, 9, 15, 1 , 0, 0, tzinfo=tz) ), ( datetime(2013, 9, 15, 9 , 0, 0, tzinfo=tz), datetime(2013, 9, 15, 17, 0, 0, tzinfo=tz) ) ]: TimeBlock.objects.get_or_create( event=event, start_time=start_time, defaults=dict( end_time=end_time ) ) SpecialStartTime.objects.get_or_create( event=event, start_time=datetime(2013, 9, 14, 10, 30, 0, tzinfo=tz), )
true
true
f703da2c8363ee59f03ded7651a552589e14a980
470
py
Python
ml_tutorial/test.py
simonfong6/micro-projects
5be195ea72ce117df6da041446f11c18e102b5df
[ "MIT" ]
null
null
null
ml_tutorial/test.py
simonfong6/micro-projects
5be195ea72ce117df6da041446f11c18e102b5df
[ "MIT" ]
null
null
null
ml_tutorial/test.py
simonfong6/micro-projects
5be195ea72ce117df6da041446f11c18e102b5df
[ "MIT" ]
null
null
null
import svm as SVM import numpy as np data_dict = { -1:np.array( [[10,9,1], [2,8,1], [3,8,1],]), 1:np.array( [[5,1,1], [6,-1,1], [7,3,1],])} svm = SVM.Support_Vector_Machine() svm.fit(data=data_dict) predict_us = [[0,10,1], [1,3,1], [3,4,1], [3,5,1], [5,5,1], [5,6,1], [6,-5,1], [5,8,1]] for p in predict_us: svm.predict(p) svm.visualize()
16.785714
37
0.412766
import svm as SVM import numpy as np data_dict = { -1:np.array( [[10,9,1], [2,8,1], [3,8,1],]), 1:np.array( [[5,1,1], [6,-1,1], [7,3,1],])} svm = SVM.Support_Vector_Machine() svm.fit(data=data_dict) predict_us = [[0,10,1], [1,3,1], [3,4,1], [3,5,1], [5,5,1], [5,6,1], [6,-5,1], [5,8,1]] for p in predict_us: svm.predict(p) svm.visualize()
true
true
f703db006bf2006e44c7efb17e8369f7a87596e3
3,745
py
Python
scripts/webui-sc/confeditor.py
lucywitherall/eve-pi
14f4555d0b89eaf8b4a9aed9f629c9358c4b121d
[ "MIT" ]
null
null
null
scripts/webui-sc/confeditor.py
lucywitherall/eve-pi
14f4555d0b89eaf8b4a9aed9f629c9358c4b121d
[ "MIT" ]
null
null
null
scripts/webui-sc/confeditor.py
lucywitherall/eve-pi
14f4555d0b89eaf8b4a9aed9f629c9358c4b121d
[ "MIT" ]
1
2019-10-07T12:56:55.000Z
2019-10-07T12:56:55.000Z
#!/usr/bin/python3 import configparser config = configparser.ConfigParser() config.read('eve-conf.ini') def int_imp(inp): while True: try: int(inp) break except ValueError: print('Input has to be a number.') inp = input('Select again: ') return int(inp) def section_select(config): csections = config.sections() for section in csections: print('{:>2}. {}'.format(csections.index(section),section)) num = len(csections) print('% 2.0f. View All' % (num)) num2 = num + 1 print('%- 2.0f. Save File' % (num2)) num3 = num2 + 1 print('% 2.0f. Exit' % (num3)) while True: inp = input('Select section to edit/option: ') inp = int_imp(inp) print() if inp == num: print_conf(config) break elif inp == num2: save_file(config) break elif inp == num3: print('Editor Closed') break elif inp < 0 or inp > num3: print('Try again') else: item_editor(config, csections[inp]) break def menu(): print() print('Menu') print('{:>2}. Edit a Section'.format(0)) print('{:>2}. View File'.format(1)) print('{:>2}. Save File'.format(2)) print('{:>2}. Exit'.format(3)) while True: inp = input('Select option: ') inp = int_imp(inp) print() if inp == 0: section_select(config) break elif inp == 1: print_conf(config) break elif inp == 2: save_file(config) break elif inp == 3: print('Editor Closed') break elif inp < 0 or inp > 3: print('Try again') def print_conf(config): csections = config.sections() for section in csections: print() print('Section: %s' % (csections[csections.index(section)])) items = config.items(csections[csections.index(section)]) for item in items: print('{:>2}. {:<24}: {}'.format(items.index(item),item[0], item[1])) menu() def save_file(config): with open('eve-conf.ini', 'w') as cfgfile: config.write(cfgfile) cfgfile.close() print('Config Saved') menu() def item_editor(config, section): csections = config.sections() items = config.items(section) print('Section: {}'.format(section)) for item in items: print('{:>2}. 
{:<24}: {}'.format(items.index(item),item[0], item[1])) print() menu_b = items.index(item) + 1 print('{:>2}. Back'.format(menu_b)) inp2 = input('Select key to edit: ') inp2 = int_imp(inp2) if inp2 == menu_b: menu() elif inp2 < 0 or inp2 > menu_b: print('Try Agin') item_editor(config, section) else: inp2 = int_imp(inp2) change = input('New value: ') old_value = config[section][items[inp2][0]] config.set(section,items[inp2][0],change) print() print('Section: %s' % (section)) items = config.items(section) for item in items: print('{:>2}. {:<24}: {}'.format(items.index(item),item[0], item[1])) conf = input('Confim Change [y,N]: ') if conf == 'y' or conf == 'Y': print('Config File Edited.') else: config.set(section,items[inp2][0],old_value) print('Config File Not Changed.') print() another = input('Edit another key in this section [y,N]: ') if another == 'y' or another == 'Y': print() item_editor(config,section) else: menu() section_select(config)
27.137681
81
0.5247
import configparser config = configparser.ConfigParser() config.read('eve-conf.ini') def int_imp(inp): while True: try: int(inp) break except ValueError: print('Input has to be a number.') inp = input('Select again: ') return int(inp) def section_select(config): csections = config.sections() for section in csections: print('{:>2}. {}'.format(csections.index(section),section)) num = len(csections) print('% 2.0f. View All' % (num)) num2 = num + 1 print('%- 2.0f. Save File' % (num2)) num3 = num2 + 1 print('% 2.0f. Exit' % (num3)) while True: inp = input('Select section to edit/option: ') inp = int_imp(inp) print() if inp == num: print_conf(config) break elif inp == num2: save_file(config) break elif inp == num3: print('Editor Closed') break elif inp < 0 or inp > num3: print('Try again') else: item_editor(config, csections[inp]) break def menu(): print() print('Menu') print('{:>2}. Edit a Section'.format(0)) print('{:>2}. View File'.format(1)) print('{:>2}. Save File'.format(2)) print('{:>2}. Exit'.format(3)) while True: inp = input('Select option: ') inp = int_imp(inp) print() if inp == 0: section_select(config) break elif inp == 1: print_conf(config) break elif inp == 2: save_file(config) break elif inp == 3: print('Editor Closed') break elif inp < 0 or inp > 3: print('Try again') def print_conf(config): csections = config.sections() for section in csections: print() print('Section: %s' % (csections[csections.index(section)])) items = config.items(csections[csections.index(section)]) for item in items: print('{:>2}. {:<24}: {}'.format(items.index(item),item[0], item[1])) menu() def save_file(config): with open('eve-conf.ini', 'w') as cfgfile: config.write(cfgfile) cfgfile.close() print('Config Saved') menu() def item_editor(config, section): csections = config.sections() items = config.items(section) print('Section: {}'.format(section)) for item in items: print('{:>2}. 
{:<24}: {}'.format(items.index(item),item[0], item[1])) print() menu_b = items.index(item) + 1 print('{:>2}. Back'.format(menu_b)) inp2 = input('Select key to edit: ') inp2 = int_imp(inp2) if inp2 == menu_b: menu() elif inp2 < 0 or inp2 > menu_b: print('Try Agin') item_editor(config, section) else: inp2 = int_imp(inp2) change = input('New value: ') old_value = config[section][items[inp2][0]] config.set(section,items[inp2][0],change) print() print('Section: %s' % (section)) items = config.items(section) for item in items: print('{:>2}. {:<24}: {}'.format(items.index(item),item[0], item[1])) conf = input('Confim Change [y,N]: ') if conf == 'y' or conf == 'Y': print('Config File Edited.') else: config.set(section,items[inp2][0],old_value) print('Config File Not Changed.') print() another = input('Edit another key in this section [y,N]: ') if another == 'y' or another == 'Y': print() item_editor(config,section) else: menu() section_select(config)
true
true
f703db3a5054be83b8f321fde2dfcb1f74eb3ea4
3,132
py
Python
examples/distancePlusAmplitude.py
OnionIoT/tau-lidar-camera
a70b24e18be8e4c5abfe525c6768fbc10a492fd8
[ "MIT" ]
31
2020-12-18T16:35:15.000Z
2022-03-25T18:41:19.000Z
examples/distancePlusAmplitude.py
OnionIoT/tau-lidar-camera
a70b24e18be8e4c5abfe525c6768fbc10a492fd8
[ "MIT" ]
17
2020-11-18T16:10:36.000Z
2022-02-01T22:19:11.000Z
examples/distancePlusAmplitude.py
OnionIoT/tau-lidar-camera
a70b24e18be8e4c5abfe525c6768fbc10a492fd8
[ "MIT" ]
4
2021-01-18T17:25:02.000Z
2021-11-01T13:25:45.000Z
import argparse import numpy as np import cv2 from TauLidarCommon.frame import FrameType from TauLidarCamera.camera import Camera def setup(serialPort=None): port = None camera = None # if no serial port is specified, scan for available Tau Camera devices if serialPort is None: ports = Camera.scan() ## Scan for available Tau Camera devices if len(ports) > 0: port = ports[0] else: port = serialPort if port is not None: Camera.setRange(0, 4500) ## points in the distance range to be colored camera = Camera.open(port) ## Open the first available Tau Camera camera.setModulationChannel(0) ## autoChannelEnabled: 0, channel: 0 camera.setIntegrationTime3d(0, 1000) ## set integration time 0: 1000 camera.setMinimalAmplitude(0, 10) ## set minimal amplitude 0: 80 cameraInfo = camera.info() print("\nToF camera opened successfully:") print(" model: %s" % cameraInfo.model) print(" firmware: %s" % cameraInfo.firmware) print(" uid: %s" % cameraInfo.uid) print(" resolution: %s" % cameraInfo.resolution) print(" port: %s" % cameraInfo.port) print("\nPress Esc key over GUI or Ctrl-c in terminal to shutdown ...") cv2.namedWindow('Depth Map') cv2.namedWindow('Amplitude') cv2.moveWindow('Depth Map', 20, 20) cv2.moveWindow('Amplitude', 20, 360) return camera def run(camera): while True: frame = camera.readFrame(FrameType.DISTANCE_AMPLITUDE) if frame: mat_depth_rgb = np.frombuffer(frame.data_depth_rgb, dtype=np.uint16, count=-1, offset=0).reshape(frame.height, frame.width, 3) mat_depth_rgb = mat_depth_rgb.astype(np.uint8) mat_amplitude = np.frombuffer(frame.data_amplitude, dtype=np.float32, count=-1, offset=0).reshape(frame.height, frame.width) mat_amplitude = mat_amplitude.astype(np.uint8) # Upscalling the image upscale = 4 depth_img = cv2.resize(mat_depth_rgb, (frame.width*upscale, frame.height*upscale)) amplitude_img = cv2.resize(mat_amplitude, (frame.width*upscale, frame.height*upscale)) cv2.imshow('Depth Map', depth_img) cv2.imshow('Amplitude', amplitude_img) if cv2.waitKey(1) == 27: 
break def cleanup(camera): print('\nShutting down ...') cv2.destroyAllWindows() camera.close() if __name__ == "__main__": parser = argparse.ArgumentParser(description='Sample program to demonstrate acquiring frames with both distance / depth and amplitude data from the Tau LiDAR Camera') parser.add_argument('--port', metavar='<serial port>', default=None, help='Specify a serial port for the Tau Camera') args = parser.parse_args() camera = setup(args.port) if camera: try: run(camera) except Exception as e: print(e) cleanup(camera)
33.319149
170
0.618455
import argparse import numpy as np import cv2 from TauLidarCommon.frame import FrameType from TauLidarCamera.camera import Camera def setup(serialPort=None): port = None camera = None if serialPort is None: ports = Camera.scan() if len(ports) > 0: port = ports[0] else: port = serialPort if port is not None: Camera.setRange(0, 4500) camera = Camera.open(port) camera.setModulationChannel(0) camera.setIntegrationTime3d(0, 1000) camera.setMinimalAmplitude(0, 10) cameraInfo = camera.info() print("\nToF camera opened successfully:") print(" model: %s" % cameraInfo.model) print(" firmware: %s" % cameraInfo.firmware) print(" uid: %s" % cameraInfo.uid) print(" resolution: %s" % cameraInfo.resolution) print(" port: %s" % cameraInfo.port) print("\nPress Esc key over GUI or Ctrl-c in terminal to shutdown ...") cv2.namedWindow('Depth Map') cv2.namedWindow('Amplitude') cv2.moveWindow('Depth Map', 20, 20) cv2.moveWindow('Amplitude', 20, 360) return camera def run(camera): while True: frame = camera.readFrame(FrameType.DISTANCE_AMPLITUDE) if frame: mat_depth_rgb = np.frombuffer(frame.data_depth_rgb, dtype=np.uint16, count=-1, offset=0).reshape(frame.height, frame.width, 3) mat_depth_rgb = mat_depth_rgb.astype(np.uint8) mat_amplitude = np.frombuffer(frame.data_amplitude, dtype=np.float32, count=-1, offset=0).reshape(frame.height, frame.width) mat_amplitude = mat_amplitude.astype(np.uint8) upscale = 4 depth_img = cv2.resize(mat_depth_rgb, (frame.width*upscale, frame.height*upscale)) amplitude_img = cv2.resize(mat_amplitude, (frame.width*upscale, frame.height*upscale)) cv2.imshow('Depth Map', depth_img) cv2.imshow('Amplitude', amplitude_img) if cv2.waitKey(1) == 27: break def cleanup(camera): print('\nShutting down ...') cv2.destroyAllWindows() camera.close() if __name__ == "__main__": parser = argparse.ArgumentParser(description='Sample program to demonstrate acquiring frames with both distance / depth and amplitude data from the Tau LiDAR Camera') parser.add_argument('--port', 
metavar='<serial port>', default=None, help='Specify a serial port for the Tau Camera') args = parser.parse_args() camera = setup(args.port) if camera: try: run(camera) except Exception as e: print(e) cleanup(camera)
true
true
f703dc1425cb1df6daa7defbf0f85faec7d5dac6
5,614
py
Python
minqlx-plugins/branding.py
tjone270/Quake-Live
f3864ab74752218044fbe6f43fba3978d519e3f1
[ "RSA-MD" ]
35
2015-11-23T22:46:40.000Z
2021-11-26T08:11:02.000Z
minqlx-plugins/branding.py
tjone270/Quake-Live
f3864ab74752218044fbe6f43fba3978d519e3f1
[ "RSA-MD" ]
14
2016-01-21T22:17:19.000Z
2017-12-30T06:34:08.000Z
minqlx-plugins/branding.py
tjone270/Quake-Live
f3864ab74752218044fbe6f43fba3978d519e3f1
[ "RSA-MD" ]
24
2015-12-10T16:49:28.000Z
2021-08-15T00:24:25.000Z
# Created by Thomas Jones on 06/11/15 - thomas@tomtecsolutions.com # branding.py, a plugin for minqlx to brand your server. # This plugin is released to everyone, for any purpose. It comes with no warranty, no guarantee it works, it's released AS IS. # You can modify everything, except for lines 1-4 and the !tomtec_versions code. They're there to indicate I whacked this together originally. Please make it better :D """ Branding.py is a minqlx plugin that permits you to personalise your server with your own information. Simply put the plugin in the 'minqlx-plugins' folder, !load the plugin, and set these cvars: qlx_serverBrandName - Where the map name usually appears, the text set in this cvar will appear instead. qlx_serverBrandTopField - Where the map author credit (line 1) appears, the text set in this cvar will appear after the credit. qlx_serverBrandBottomField - Where the map author credit (line 2) appears, the text set in this cvar will appear after the credit. qlx_connectMessage - When the player is at the awaiting challenge screen when they first connect to the server, text will appear here. qlx_loadedMessage - When the player gets to the menu after connecting, and clicks Join or Spectate, they'll get centre print from this cvar. qlx_countdownMessage - When the countdown begins, this text will appear mid-screen. (like the qlx_loadedMessage does) qlx_endOfGameMessage - When the game finishes, it'll put the text in this cvar in the text box on the left. qlx_brandingPrependMapName - This cvar will put the map name before your qlx_serverBrandName. Default: 0 qlx_brandingAppendGameType - Will add the game type after your qlx_serverBrandName. Default: 0 qlx_rainbowBrandName - Make the entire map name (qlx_serverBrandName) appear in rainbow colouring. Default: 0 Once set, change maps, and you'll see the map loading screen is changed. 
""" import minqlx class branding(minqlx.Plugin): def __init__(self): self.add_hook("new_game", self.brand_map) self.add_hook("player_connect", self.player_connect) self.add_hook("player_loaded", self.player_loaded) self.add_hook("game_countdown", self.game_countdown) self.add_hook("game_end", self.game_end) self.add_command("tomtec_versions", self.cmd_showversion) self.set_cvar_once("qlx_brandingPrependMapName", "0") self.set_cvar_once("qlx_brandingAppendGameType", "0") self.set_cvar_once("qlx_rainbowBrandName", "0") self.plugin_version = "2.1" self.playerConnectedYetList = [] def brand_map(self): if self.get_cvar("qlx_serverBrandName") == None: self.set_cvar("qlx_serverBrandName", self.game.map_title) if self.get_cvar("qlx_brandingPrependMapName", bool): topBranding = self.game.map_title + " " + self.get_cvar("qlx_serverBrandName") else: topBranding = self.get_cvar("qlx_serverBrandName") if self.get_cvar("qlx_brandingAppendGameType", bool): minqlx.set_configstring(3, topBranding + " " + self.game.type) else: minqlx.set_configstring(3, topBranding) if self.get_cvar("qlx_serverBrandTopField") != None: cs = self.game.map_subtitle1 if cs: cs += " - " minqlx.set_configstring(678, cs + (self.get_cvar("qlx_serverBrandTopField"))) if self.get_cvar("qlx_serverBrandBottomField") != None: cs = self.game.map_subtitle2 if cs: cs += " - " minqlx.set_configstring(679, cs + (self.get_cvar("qlx_serverBrandBottomField"))) if self.get_cvar("qlx_rainbowBrandName", bool): # Thanks Mino for this bit! def rotating_colors(): i = 0 while True: res = (i % 7) + 1 i += 1 yield res map_name = self.clean_text(minqlx.get_configstring(3)) r = rotating_colors() res = "" for i in range(len(map_name)): res += "^{}{}".format(next(r), map_name[i]) minqlx.set_configstring(3, res) def player_connect(self, player): if self.get_cvar("qlx_connectMessage") != None: if player not in self.playerConnectedYetList: self.playerConnectedYetList.append(player) return "{}\n^7This server is running ^4branding.py^7. 
^2http://github.com/tjone270/Quake-Live^7.\n".format(self.get_cvar("qlx_connectMessage")) def player_loaded(self, player): if self.get_cvar("qlx_loadedMessage") != None: self.center_print(self.get_cvar("qlx_loadedMessage"), player.id) try: self.playerConnectedYetList.remove(player) except: return def game_countdown(self): if self.get_cvar("qlx_countdownMessage") != None: self.center_print(self.get_cvar("qlx_countdownMessage")) def game_end(self, data): if self.get_cvar("qlx_endOfGameMessage") != None: self.msg(self.get_cvar("qlx_endOfGameMessage")) def cmd_showversion(self, player, msg, channel): channel.reply("^4branding.py^7 - version {}, created by Thomas Jones on 06/11/2015.".format(self.plugin_version))
48.817391
167
0.63876
# You can modify everything, except for lines 1-4 and the !tomtec_versions code. They're there to indicate I whacked this together originally. Please make it better :D import minqlx class branding(minqlx.Plugin): def __init__(self): self.add_hook("new_game", self.brand_map) self.add_hook("player_connect", self.player_connect) self.add_hook("player_loaded", self.player_loaded) self.add_hook("game_countdown", self.game_countdown) self.add_hook("game_end", self.game_end) self.add_command("tomtec_versions", self.cmd_showversion) self.set_cvar_once("qlx_brandingPrependMapName", "0") self.set_cvar_once("qlx_brandingAppendGameType", "0") self.set_cvar_once("qlx_rainbowBrandName", "0") self.plugin_version = "2.1" self.playerConnectedYetList = [] def brand_map(self): if self.get_cvar("qlx_serverBrandName") == None: self.set_cvar("qlx_serverBrandName", self.game.map_title) if self.get_cvar("qlx_brandingPrependMapName", bool): topBranding = self.game.map_title + " " + self.get_cvar("qlx_serverBrandName") else: topBranding = self.get_cvar("qlx_serverBrandName") if self.get_cvar("qlx_brandingAppendGameType", bool): minqlx.set_configstring(3, topBranding + " " + self.game.type) else: minqlx.set_configstring(3, topBranding) if self.get_cvar("qlx_serverBrandTopField") != None: cs = self.game.map_subtitle1 if cs: cs += " - " minqlx.set_configstring(678, cs + (self.get_cvar("qlx_serverBrandTopField"))) if self.get_cvar("qlx_serverBrandBottomField") != None: cs = self.game.map_subtitle2 if cs: cs += " - " minqlx.set_configstring(679, cs + (self.get_cvar("qlx_serverBrandBottomField"))) if self.get_cvar("qlx_rainbowBrandName", bool): def rotating_colors(): i = 0 while True: res = (i % 7) + 1 i += 1 yield res map_name = self.clean_text(minqlx.get_configstring(3)) r = rotating_colors() res = "" for i in range(len(map_name)): res += "^{}{}".format(next(r), map_name[i]) minqlx.set_configstring(3, res) def player_connect(self, player): if self.get_cvar("qlx_connectMessage") != None: if 
player not in self.playerConnectedYetList: self.playerConnectedYetList.append(player) return "{}\n^7This server is running ^4branding.py^7. ^2http://github.com/tjone270/Quake-Live^7.\n".format(self.get_cvar("qlx_connectMessage")) def player_loaded(self, player): if self.get_cvar("qlx_loadedMessage") != None: self.center_print(self.get_cvar("qlx_loadedMessage"), player.id) try: self.playerConnectedYetList.remove(player) except: return def game_countdown(self): if self.get_cvar("qlx_countdownMessage") != None: self.center_print(self.get_cvar("qlx_countdownMessage")) def game_end(self, data): if self.get_cvar("qlx_endOfGameMessage") != None: self.msg(self.get_cvar("qlx_endOfGameMessage")) def cmd_showversion(self, player, msg, channel): channel.reply("^4branding.py^7 - version {}, created by Thomas Jones on 06/11/2015.".format(self.plugin_version))
true
true
f703dcb4a2d04a5dcb8b6f0e5e190bc6173390ca
13,974
py
Python
cartridge/project_template/project_name/settings.py
dsanders11/cartridge
d3a14da3fbbd0c36c589c2abef1bb3364ef28faf
[ "BSD-2-Clause" ]
null
null
null
cartridge/project_template/project_name/settings.py
dsanders11/cartridge
d3a14da3fbbd0c36c589c2abef1bb3364ef28faf
[ "BSD-2-Clause" ]
null
null
null
cartridge/project_template/project_name/settings.py
dsanders11/cartridge
d3a14da3fbbd0c36c589c2abef1bb3364ef28faf
[ "BSD-2-Clause" ]
null
null
null
import os from django.utils.translation import gettext_lazy as _ ###################### # CARTRIDGE SETTINGS # ###################### # The following settings are already defined in cartridge.shop.defaults # with default values, but are common enough to be put here, commented # out, for conveniently overriding. Please consult the settings # documentation for a full list of settings Cartridge implements: # http://cartridge.jupo.org/configuration.html#default-settings # Sequence of available credit card types for payment. # SHOP_CARD_TYPES = ("Mastercard", "Visa", "Diners", "Amex") # Setting to turn on featured images for shop categories. Defaults to False. # SHOP_CATEGORY_USE_FEATURED_IMAGE = True # If True, the checkout process is split into separate # billing/shipping and payment steps. # SHOP_CHECKOUT_STEPS_SPLIT = True # If True, the checkout process has a final confirmation step before # completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION = True # Controls the formatting of monetary values accord to the locale # module in the python standard library. If an empty string is # used, will fall back to the system's locale. # SHOP_CURRENCY_LOCALE = "" # Dotted package path and name of the function that # is called on submit of the billing/shipping checkout step. This # is where shipping calculation can be performed and set using the # function ``cartridge.shop.utils.set_shipping``. # SHOP_HANDLER_BILLING_SHIPPING = \ # "cartridge.shop.checkout.default_billship_handler" # Dotted package path and name of the function that # is called once an order is successful and all of the order # object's data has been created. This is where any custom order # processing should be implemented. # SHOP_HANDLER_ORDER = "cartridge.shop.checkout.default_order_handler" # Dotted package path and name of the function that # is called on submit of the payment checkout step. This is where # integration with a payment gateway should be implemented. 
# SHOP_HANDLER_PAYMENT = "cartridge.shop.checkout.default_payment_handler" # Sequence of value/name pairs for order statuses. # SHOP_ORDER_STATUS_CHOICES = ( # (1, "Unprocessed"), # (2, "Processed"), # ) # Sequence of value/name pairs for types of product options, # eg Size, Colour. NOTE: Increasing the number of these will # require database migrations! # SHOP_OPTION_TYPE_CHOICES = ( # (1, "Size"), # (2, "Colour"), # ) # Sequence of indexes from the SHOP_OPTION_TYPE_CHOICES setting that # control how the options should be ordered in the admin, # eg for "Colour" then "Size" given the above: # SHOP_OPTION_ADMIN_ORDER = (2, 1) ###################### # MEZZANINE SETTINGS # ###################### # The following settings are already defined with default values in # the ``defaults.py`` module within each of Mezzanine's apps, but are # common enough to be put here, commented out, for conveniently # overriding. Please consult the settings documentation for a full list # of settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering and grouping of the admin menu. # # ADMIN_MENU_ORDER = ( # ("Content", ("pages.Page", "blog.BlogPost", # "generic.ThreadedComment", (_("Media Library"), "media-library"),)), # (_("Shop"), ("shop.Product", "shop.ProductOption", "shop.DiscountCode", # "shop.Sale", "shop.Order")), # ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")), # ("Users", ("auth.User", "auth.Group",)), # ) # A three item sequence, each containing a sequence of template tags # used to render the admin dashboard. # # DASHBOARD_TAGS = ( # ("blog_tags.quick_blog", "mezzanine_tags.app_list"), # ("comment_tags.recent_comments",), # ("mezzanine_tags.recent_actions",), # ) # A sequence of templates used by the ``page_menu`` template tag. Each # item in the sequence is a three item sequence, containing a unique ID # for the template, a label for the template, and the template path. 
# These templates are then available for selection when editing which # menus a page should appear in. Note that if a menu template is used # that doesn't appear in this setting, all pages will appear in it. # PAGE_MENU_TEMPLATES = ( # (1, _("Top navigation bar"), "pages/menus/dropdown.html"), # (2, _("Left-hand tree"), "pages/menus/tree.html"), # (3, _("Footer"), "pages/menus/footer.html"), # ) # A sequence of fields that will be injected into Mezzanine's (or any # library's) models. Each item in the sequence is a four item sequence. # The first two items are the dotted path to the model and its field # name to be added, and the dotted path to the field class to use for # the field. The third and fourth items are a sequence of positional # args and a dictionary of keyword args, to use when creating the # field instance. When specifying the field class, the path # ``django.models.db.`` can be omitted for regular Django model fields. # # EXTRA_MODEL_FIELDS = ( # ( # # Dotted path to field. # "mezzanine.blog.models.BlogPost.image", # # Dotted path to field class. # "somelib.fields.ImageField", # # Positional args for field class. # (_("Image"),), # # Keyword args for field class. # {"blank": True, "upload_to": "blog"}, # ), # # Example of adding a field to *all* of Mezzanine's content types: # ( # "mezzanine.pages.models.Page.another_field", # "IntegerField", # 'django.db.models.' is implied if path is omitted. # (_("Another name"),), # {"blank": True, "default": 1}, # ), # ) # Setting to turn on featured images for blog posts. Defaults to False. # # BLOG_USE_FEATURED_IMAGE = True # If True, the django-modeltranslation will be added to the # INSTALLED_APPS setting. 
USE_MODELTRANSLATION = False ######################## # MAIN DJANGO SETTINGS # ######################## # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ["localhost", "127.0.0.1"] # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = "UTC" # If you set this to True, Django will use timezone-aware datetimes. USE_TZ = True # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = "en" # Supported languages LANGUAGES = (("en", _("English")),) # A boolean that turns on/off debug mode. When set to ``True``, stack traces # are displayed for error pages. Should always be set to ``False`` in # production. Best set to ``True`` in local_settings.py DEBUG = False # Whether a user's session cookie expires when the Web browser is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = False AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",) # The numeric mode to set newly-uploaded files to. The value should be # a mode you'd pass directly to os.chmod. FILE_UPLOAD_PERMISSIONS = 0o644 DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" ############# # DATABASES # ############# DATABASES = { "default": { # Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle". "ENGINE": "django.db.backends.", # DB name or path to database file if using sqlite3. 
"NAME": "", # Not used with sqlite3. "USER": "", # Not used with sqlite3. "PASSWORD": "", # Set to empty string for localhost. Not used with sqlite3. "HOST": "", # Set to empty string for default. Not used with sqlite3. "PORT": "", } } ######### # PATHS # ######### # Full filesystem path to the project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH) # Every cache key will get prefixed with this value - here we set it to # the name of the directory the project is in to try and use something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = "/static/" # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/")) # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = "/media/" # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = os.path.join(PROJECT_ROOT, MEDIA_URL.strip("/")) # Package/module name to import the root urlpatterns from for the project. 
ROOT_URLCONF = "%s.urls" % PROJECT_APP TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", "DIRS": [os.path.join(PROJECT_ROOT, "templates")], "OPTIONS": { "context_processors": [ "django.contrib.auth.context_processors.auth", "django.contrib.messages.context_processors.messages", "django.template.context_processors.debug", "django.template.context_processors.i18n", "django.template.context_processors.static", "django.template.context_processors.media", "django.template.context_processors.request", "django.template.context_processors.tz", "mezzanine.conf.context_processors.settings", "mezzanine.pages.context_processors.page", ], "loaders": [ "mezzanine.template.loaders.host_themes.Loader", "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ], }, }, ] ################ # APPLICATIONS # ################ INSTALLED_APPS = ( "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.redirects", "django.contrib.sessions", "django.contrib.sites", "django.contrib.sitemaps", "django.contrib.messages", "django.contrib.staticfiles", "mezzanine.boot", "mezzanine.conf", "mezzanine.core", "mezzanine.generic", "mezzanine.pages", "cartridge.shop", "mezzanine.blog", "mezzanine.forms", "mezzanine.galleries", # "mezzanine.twitter", # "mezzanine.accounts", ) # List of middleware classes to use. Order is important; in the request phase, # these middleware classes will be applied in the order given, and in the # response phase the middleware will be applied in reverse order. 
MIDDLEWARE = ( "mezzanine.core.middleware.UpdateCacheMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", # Uncomment if using internationalisation or localisation # 'django.middleware.locale.LocaleMiddleware', "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "cartridge.shop.middleware.ShopMiddleware", "mezzanine.core.request.CurrentRequestMiddleware", "mezzanine.core.middleware.RedirectFallbackMiddleware", "mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware", "mezzanine.core.middleware.SitePermissionMiddleware", "mezzanine.pages.middleware.PageMiddleware", "mezzanine.core.middleware.FetchFromCacheMiddleware", ) # Store these package names here as they may change in the future since # at the moment we are using custom forks of them. PACKAGE_NAME_FILEBROWSER = "filebrowser_safe" PACKAGE_NAME_GRAPPELLI = "grappelli_safe" ######################### # OPTIONAL APPLICATIONS # ######################### # These will be added to ``INSTALLED_APPS``, only if available. OPTIONAL_APPS = ( "debug_toolbar", "django_extensions", "compressor", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################## # LOCAL SETTINGS # ################## # Allow any settings to be defined in local_settings.py which should be # ignored in your version control system allowing for settings to be # defined per machine. # Instead of doing "from .local_settings import *", we use exec so that # local_settings has full access to everything defined in this module. # Also force into sys.modules so it's visible to Django's autoreload. 
f = os.path.join(PROJECT_APP_PATH, "local_settings.py") if os.path.exists(f): import imp import sys module_name = "%s.local_settings" % PROJECT_APP module = imp.new_module(module_name) module.__file__ = f sys.modules[module_name] = module exec(open(f, "rb").read()) #################### # DYNAMIC SETTINGS # #################### # set_dynamic_settings() will rewrite globals based on what has been defined so far, in # order to provide some better defaults where applicable. try: from mezzanine.utils.conf import set_dynamic_settings except ImportError: pass else: set_dynamic_settings(globals())
35.467005
87
0.696508
import os from django.utils.translation import gettext_lazy as _ # SHOP_CURRENCY_LOCALE = "" # Dotted package path and name of the function that # is called on submit of the billing/shipping checkout step. This # is where shipping calculation can be performed and set using the # function ``cartridge.shop.utils.set_shipping``. # SHOP_HANDLER_BILLING_SHIPPING = \ # "cartridge.shop.checkout.default_billship_handler" # Dotted package path and name of the function that # is called once an order is successful and all of the order # object's data has been created. This is where any custom order # common enough to be put here, commented out, for conveniently # overriding. Please consult the settings documentation for a full list # of settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering and grouping of the admin menu. # # ADMIN_MENU_ORDER = ( # ("Content", ("pages.Page", "blog.BlogPost", # "generic.ThreadedComment", (_("Media Library"), "media-library"),)), # (_("Shop"), ("shop.Product", "shop.ProductOption", "shop.DiscountCode", # "shop.Sale", "shop.Order")), # ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")), # ("Users", ("auth.User", "auth.Group",)), # ) # A three item sequence, each containing a sequence of template tags # used to render the admin dashboard. # # DASHBOARD_TAGS = ( # ("blog_tags.quick_blog", "mezzanine_tags.app_list"), # ("comment_tags.recent_comments",), # ("mezzanine_tags.recent_actions",), # ) # A sequence of templates used by the ``page_menu`` template tag. Each # item in the sequence is a three item sequence, containing a unique ID # for the template, a label for the template, and the template path. # These templates are then available for selection when editing which # menus a page should appear in. Note that if a menu template is used # that doesn't appear in this setting, all pages will appear in it. # library's) models. 
Each item in the sequence is a four item sequence. # ( # "mezzanine.pages.models.Page.another_field", # "IntegerField", # 'django.db.models.' is implied if path is omitted. # (_("Another name"),), # {"blank": True, "default": 1}, # ), # ) # Setting to turn on featured images for blog posts. Defaults to False. # # BLOG_USE_FEATURED_IMAGE = True # If True, the django-modeltranslation will be added to the # INSTALLED_APPS setting. USE_MODELTRANSLATION = False ######################## # MAIN DJANGO SETTINGS # ######################## # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ["localhost", "127.0.0.1"] # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = "UTC" # If you set this to True, Django will use timezone-aware datetimes. USE_TZ = True # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = "en" # Supported languages LANGUAGES = (("en", _("English")),) # A boolean that turns on/off debug mode. When set to ``True``, stack traces # are displayed for error pages. Should always be set to ``False`` in # production. Best set to ``True`` in local_settings.py DEBUG = False # Whether a user's session cookie expires when the Web browser is closed. 
SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID = 1 USE_I18N = False AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",) FILE_UPLOAD_PERMISSIONS = 0o644 DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" ############# # DATABASES # ############# DATABASES = { "default": { # Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle". "ENGINE": "django.db.backends.", # DB name or path to database file if using sqlite3. "NAME": "", # Not used with sqlite3. "USER": "", # Not used with sqlite3. "PASSWORD": "", # Set to empty string for localhost. Not used with sqlite3. "HOST": "", # Set to empty string for default. Not used with sqlite3. "PORT": "", } } ######### # PATHS # ######### # Full filesystem path to the project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH) # Every cache key will get prefixed with this value - here we set it to # the name of the directory the project is in to try and use something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = "/static/" # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/")) # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = "/media/" # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = os.path.join(PROJECT_ROOT, MEDIA_URL.strip("/")) # Package/module name to import the root urlpatterns from for the project. 
ROOT_URLCONF = "%s.urls" % PROJECT_APP TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", "DIRS": [os.path.join(PROJECT_ROOT, "templates")], "OPTIONS": { "context_processors": [ "django.contrib.auth.context_processors.auth", "django.contrib.messages.context_processors.messages", "django.template.context_processors.debug", "django.template.context_processors.i18n", "django.template.context_processors.static", "django.template.context_processors.media", "django.template.context_processors.request", "django.template.context_processors.tz", "mezzanine.conf.context_processors.settings", "mezzanine.pages.context_processors.page", ], "loaders": [ "mezzanine.template.loaders.host_themes.Loader", "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ], }, }, ] ################ # APPLICATIONS # ################ INSTALLED_APPS = ( "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.redirects", "django.contrib.sessions", "django.contrib.sites", "django.contrib.sitemaps", "django.contrib.messages", "django.contrib.staticfiles", "mezzanine.boot", "mezzanine.conf", "mezzanine.core", "mezzanine.generic", "mezzanine.pages", "cartridge.shop", "mezzanine.blog", "mezzanine.forms", "mezzanine.galleries", # "mezzanine.twitter", # "mezzanine.accounts", ) # List of middleware classes to use. Order is important; in the request phase, # these middleware classes will be applied in the order given, and in the # response phase the middleware will be applied in reverse order. 
MIDDLEWARE = ( "mezzanine.core.middleware.UpdateCacheMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", # Uncomment if using internationalisation or localisation # 'django.middleware.locale.LocaleMiddleware', "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "cartridge.shop.middleware.ShopMiddleware", "mezzanine.core.request.CurrentRequestMiddleware", "mezzanine.core.middleware.RedirectFallbackMiddleware", "mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware", "mezzanine.core.middleware.SitePermissionMiddleware", "mezzanine.pages.middleware.PageMiddleware", "mezzanine.core.middleware.FetchFromCacheMiddleware", ) # Store these package names here as they may change in the future since # at the moment we are using custom forks of them. PACKAGE_NAME_FILEBROWSER = "filebrowser_safe" PACKAGE_NAME_GRAPPELLI = "grappelli_safe" ######################### # OPTIONAL APPLICATIONS # ######################### # These will be added to ``INSTALLED_APPS``, only if available. OPTIONAL_APPS = ( "debug_toolbar", "django_extensions", "compressor", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################## # LOCAL SETTINGS # ################## # Allow any settings to be defined in local_settings.py which should be # ignored in your version control system allowing for settings to be # defined per machine. # Instead of doing "from .local_settings import *", we use exec so that # local_settings has full access to everything defined in this module. # Also force into sys.modules so it's visible to Django's autoreload. 
f = os.path.join(PROJECT_APP_PATH, "local_settings.py") if os.path.exists(f): import imp import sys module_name = "%s.local_settings" % PROJECT_APP module = imp.new_module(module_name) module.__file__ = f sys.modules[module_name] = module exec(open(f, "rb").read()) #################### # DYNAMIC SETTINGS # #################### # set_dynamic_settings() will rewrite globals based on what has been defined so far, in # order to provide some better defaults where applicable. try: from mezzanine.utils.conf import set_dynamic_settings except ImportError: pass else: set_dynamic_settings(globals())
true
true
f703dd557ff0c6601e9b1327d30fb5ee5f8ded6a
3,770
py
Python
main/hit_object.py
MBmasher/weighted-object
eaaf25338240873b7c4197097b2bb73be256b702
[ "MIT" ]
null
null
null
main/hit_object.py
MBmasher/weighted-object
eaaf25338240873b7c4197097b2bb73be256b702
[ "MIT" ]
null
null
null
main/hit_object.py
MBmasher/weighted-object
eaaf25338240873b7c4197097b2bb73be256b702
[ "MIT" ]
null
null
null
# HitObject class class HitObject: def __init__(self, start_x, start_y, end_x, end_y, time, object_type): self.start_x = start_x self.start_y = start_y self.end_x = end_x self.end_y = end_y self.time = time self.object_type = object_type # hit_circle, even_repeat_slider, odd_repeat_slider, spinner # Finds the line number in which the hit objects start. def find_start(lines): line_number = 0 for x in lines: if x == "[HitObjects]": return line_number + 1 line_number += 1 # Converts a line from .osu file into HitObject. def convert_hit_object(line): split_line = line.split(",") start_x = int(split_line[0]) start_y = int(split_line[1]) end_x = int(split_line[0]) end_y = int(split_line[1]) time = int(split_line[2]) if int(split_line[3]) & 0b1: object_type = "hit_circle" elif int(split_line[3]) & 0b1000: object_type = "spinner" elif int(split_line[6]) % 2 == 0: object_type = "even_repeat_slider" else: object_type = "odd_repeat_slider" slider_point_list = split_line[5].split("|") end_point = slider_point_list[-1].split(":") end_x = int(end_point[0]) end_y = int(end_point[1]) return HitObject(start_x, start_y, end_x, end_y, time, object_type) # Finds distance snap by multiplying distance and time of two objects. def calculate_distance_snap(first_object, second_object): first_x = first_object.end_x first_y = first_object.end_y first_time = first_object.time second_x = second_object.start_x second_y = second_object.start_y second_time = second_object.time difference_x = abs(first_x - second_x) difference_y = abs(first_y - second_y) difference_time = second_time - first_time calculation_time = difference_time if difference_time < 100: # 2x bonus for objects unsingletappable (Detected as streams) calculation_time = difference_time / 2.0 elif difference_time < 120: # For the grey spot around 300bpm which can be either jumps or streams. 
calculation_time = difference_time / (((120 - difference_time) ** 2) / 400.0 + 1) calculation_time = 1.0 / calculation_time # 1/time has to be used for calculation as smaller time difference means bigger distance snap. distance = (difference_x ** 2 + difference_y ** 2) ** 0.5 return distance * calculation_time # Calculates weighting of objects. def calculate_weighting(average_distance, max_distance, distance_snap): second_half = max_distance - average_distance # used to calculate distance snap above the average if distance_snap < average_distance: raw_weight = (distance_snap / average_distance) / 2.0 # this is the raw weighting, range from 0 to 1 # if distance snap is under the average, put it somewhere between 0 and 0.5 else: raw_weight = ((distance_snap - average_distance) / second_half) / 2.0 + 0.5 # if distance snap is above average, put it somewhere between 0.5 and 1 # spacing below ~0.67 is weighted just as much as spacing above it, so only relatively # BIG jumps will make much of a difference print (raw_weight * 1.5) ** 1.7 return (raw_weight * 1.5) ** 1.7 # Calculates nerf/buff based on percentage change from old objects. def calculate_percentage_change(old_percentage): if old_percentage < 0.65: # Nerf all maps which reach under 65%. # 55% would get around 5% nerf, while 50% would get around 10% nerf. return 1 - (((0.65 - old_percentage) ** 1.5) / 0.524) else: return 1
35.233645
110
0.659947
class HitObject: def __init__(self, start_x, start_y, end_x, end_y, time, object_type): self.start_x = start_x self.start_y = start_y self.end_x = end_x self.end_y = end_y self.time = time self.object_type = object_type def find_start(lines): line_number = 0 for x in lines: if x == "[HitObjects]": return line_number + 1 line_number += 1 def convert_hit_object(line): split_line = line.split(",") start_x = int(split_line[0]) start_y = int(split_line[1]) end_x = int(split_line[0]) end_y = int(split_line[1]) time = int(split_line[2]) if int(split_line[3]) & 0b1: object_type = "hit_circle" elif int(split_line[3]) & 0b1000: object_type = "spinner" elif int(split_line[6]) % 2 == 0: object_type = "even_repeat_slider" else: object_type = "odd_repeat_slider" slider_point_list = split_line[5].split("|") end_point = slider_point_list[-1].split(":") end_x = int(end_point[0]) end_y = int(end_point[1]) return HitObject(start_x, start_y, end_x, end_y, time, object_type) def calculate_distance_snap(first_object, second_object): first_x = first_object.end_x first_y = first_object.end_y first_time = first_object.time second_x = second_object.start_x second_y = second_object.start_y second_time = second_object.time difference_x = abs(first_x - second_x) difference_y = abs(first_y - second_y) difference_time = second_time - first_time calculation_time = difference_time if difference_time < 100: calculation_time = difference_time / 2.0 elif difference_time < 120: calculation_time = difference_time / (((120 - difference_time) ** 2) / 400.0 + 1) calculation_time = 1.0 / calculation_time distance = (difference_x ** 2 + difference_y ** 2) ** 0.5 return distance * calculation_time def calculate_weighting(average_distance, max_distance, distance_snap): second_half = max_distance - average_distance if distance_snap < average_distance: raw_weight = (distance_snap / average_distance) / 2.0 else: raw_weight = ((distance_snap - average_distance) / second_half) / 2.0 + 0.5 print (raw_weight * 1.5) 
** 1.7 return (raw_weight * 1.5) ** 1.7 def calculate_percentage_change(old_percentage): if old_percentage < 0.65: return 1 - (((0.65 - old_percentage) ** 1.5) / 0.524) else: return 1
true
true
f703ddc91411d775bc61ad267063c63f00cf2580
20,786
py
Python
src/sage/schemes/elliptic_curves/weierstrass_morphism.py
bopopescu/classic_diff_geom
2b1d88becbc8cb30962e0995cc78e429e0f5589f
[ "BSL-1.0" ]
null
null
null
src/sage/schemes/elliptic_curves/weierstrass_morphism.py
bopopescu/classic_diff_geom
2b1d88becbc8cb30962e0995cc78e429e0f5589f
[ "BSL-1.0" ]
null
null
null
src/sage/schemes/elliptic_curves/weierstrass_morphism.py
bopopescu/classic_diff_geom
2b1d88becbc8cb30962e0995cc78e429e0f5589f
[ "BSL-1.0" ]
1
2020-07-24T12:08:30.000Z
2020-07-24T12:08:30.000Z
r""" Isomorphisms between Weierstrass models of elliptic curves AUTHORS: - Robert Bradshaw (2007): initial version - John Cremona (Jan 2008): isomorphisms, automorphisms and twists in all characteristics """ #***************************************************************************** # Copyright (C) 2007 Robert Bradshaw <robertwb@math.washington.edu> # # Distributed under the terms of the GNU General Public License (GPL) # # This code is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # The full text of the GPL is available at: # # http://www.gnu.org/licenses/ #***************************************************************************** from sage.categories.morphism import Morphism from constructor import EllipticCurve from sage.categories.homset import Hom class baseWI: r""" This class implements the basic arithmetic of isomorphisms between Weierstrass models of elliptic curves. These are specified by lists of the form `[u,r,s,t]` (with `u\not=0`) which specifies a transformation `(x,y) \mapsto (x',y')` where `(x,y) = (u^2x'+r , u^3y' + su^2x' + t).` INPUT: - ``u,r,s,t`` (default (1,0,0,0)) -- standard parameters of an isomorphism between Weierstrass models. EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: baseWI() (1, 0, 0, 0) sage: baseWI(2,3,4,5) (2, 3, 4, 5) sage: R.<u,r,s,t>=QQ[]; baseWI(u,r,s,t) (u, r, s, t) """ def __init__(self, u=1, r=0, s=0, t=0): r""" Constructor: check for valid parameters (defaults to identity) INPUT: - ``u,r,s,t`` (default (1,0,0,0)) -- standard parameters of an isomorphism between Weierstrass models. 
EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: baseWI() (1, 0, 0, 0) sage: baseWI(2,3,4,5) (2, 3, 4, 5) sage: R.<u,r,s,t>=QQ[]; baseWI(u,r,s,t) (u, r, s, t) """ if u==0: raise ValueError("u!=0 required for baseWI") self.u=u; self.r=r; self.s=s; self.t=t def __cmp__(self, other): """ Standard comparison function. The ordering is just lexicographic on the tuple `(u,r,s,t)`. .. note:: In a list of automorphisms, there is no guarantee that the identity will be first! EXAMPLE:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: baseWI(1,2,3,4)==baseWI(1,2,3,4) True sage: baseWI(1,2,3,4)<baseWI(1,2,3,5) True sage: baseWI(1,2,3,4)>baseWI(1,2,3,4) False :: It will never return equality if other is of another type: sage: baseWI() == 1 False """ if not isinstance(other, baseWI): return cmp(type(self), type(other)) return cmp(self.tuple(), other.tuple()) def tuple(self): r""" Returns the parameters `u,r,s,t` as a tuple. EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: u,r,s,t=baseWI(2,3,4,5).tuple() sage: w=baseWI(2,3,4,5) sage: u,r,s,t=w.tuple() sage: u 2 """ return (self.u,self.r,self.s,self.t) def __mul__(self, other): r""" Returns the Composition of this isomorphism and another. EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: baseWI(1,2,3,4)*baseWI(5,6,7,8) (5, 56, 22, 858) sage: baseWI()*baseWI(1,2,3,4)*baseWI() (1, 2, 3, 4) """ u1,r1,s1,t1=other.tuple() u2,r2,s2,t2=self.tuple() return baseWI(u1*u2,(u1**2)*r2+r1,u1*s2+s1,(u1**3)*t2+s1*(u1**2)*r2+t1) def __invert__(self): r""" Returns the inverse of this isomorphism. 
EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: w=baseWI(2,3,4,5) sage: ~w (1/2, -3/4, -2, 7/8) sage: w*~w (1, 0, 0, 0) sage: ~w*w (1, 0, 0, 0) sage: R.<u,r,s,t>=QQ[]; w=baseWI(u,r,s,t) sage: ~w (1/u, (-r)/u^2, (-s)/u, (r*s - t)/u^3) sage: ~w*w (1, 0, 0, 0) """ u,r,s,t=self.tuple() return baseWI(1/u,-r/(u**2),-s/u,(r*s-t)/(u**3)) def __repr__(self): r""" Returns the string representation of this isomorphism. EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: baseWI(2,3,4,5) (2, 3, 4, 5) """ return self.tuple().__repr__() def is_identity(self): r""" Returns True if this is the identity isomorphism. EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: w=baseWI(); w.is_identity() True sage: w=baseWI(2,3,4,5); w.is_identity() False """ return self.tuple()==(1,0,0,0) def __call__(self, EorP): r""" Base application of isomorphisms to curves and points: a baseWI `w` may be applied to a list `[a1,a2,a3,a4,a6]` representing the `a`-invariants of an elliptic curve `E`, returning the `a`-invariants of `w(E)`; or to `P=[x,y]` or `P=[x,y,z]` representing a point in `\mathbb{A}^2` or `\mathbb{P}^2`, returning the transformed point. INPUT: - ``EorP`` -- either an elliptic curve, or a point on an elliptic curve. OUTPUT: The transformed curve or point. 
EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: E=EllipticCurve([0,0,1,-7,6]) sage: w=baseWI(2,3,4,5); sage: w(E.ainvs()) [4, -7/4, 11/8, -3/2, -9/32] sage: P=E(-2,3) sage: w(P.xy()) [-5/4, 9/4] sage: EllipticCurve(w(E.ainvs()))(w(P.xy())) (-5/4 : 9/4 : 1) """ u,r,s,t=self.tuple() if len(EorP)==5: a1,a2,a3,a4,a6=EorP a6 += r*(a4 + r*(a2 + r)) - t*(a3 + r*a1 + t); a4 += -s*a3 + 2*r*a2 - (t + r*s)*a1 + 3*r*r - 2*s*t; a3 += r*a1 +t+t; a2 += -s*a1 + 3*r - s*s; a1 += 2*s; return [a1/u,a2/u**2,a3/u**3,a4/u**4,a6/u**6] if len(EorP)==2: x,y=EorP x-=r y-=(s*x+t) return [x/u**2,y/u**3] if len(EorP)==3: x,y,z=EorP x-=r*z y-=(s*x+t*z) return [x/u**2,y/u**3,z] raise ValueError("baseWI(a) only for a=(x,y), (x:y:z) or (a1,a2,a3,a4,a6)") def isomorphisms(E,F,JustOne=False): r""" Returns one or all isomorphisms between two elliptic curves. INPUT: - ``E``, ``F`` (EllipticCurve) -- Two elliptic curves. - ``JustOne`` (bool) If True, returns one isomorphism, or None if the curves are not isomorphic. If False, returns a (possibly empty) list of isomorphisms. OUTPUT: Either None, or a 4-tuple `(u,r,s,t)` representing an isomorphism, or a list of these. .. note:: This function is not intended for users, who should use the interface provided by ``ell_generic``. 
EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: isomorphisms(EllipticCurve_from_j(0),EllipticCurve('27a3')) [(-1, 0, 0, -1), (1, 0, 0, 0)] sage: isomorphisms(EllipticCurve_from_j(0),EllipticCurve('27a3'),JustOne=True) (1, 0, 0, 0) sage: isomorphisms(EllipticCurve_from_j(0),EllipticCurve('27a1')) [] sage: isomorphisms(EllipticCurve_from_j(0),EllipticCurve('27a1'),JustOne=True) """ from ell_generic import is_EllipticCurve if not is_EllipticCurve(E) or not is_EllipticCurve(F): raise ValueError("arguments are not elliptic curves") K = E.base_ring() # if not K == F.base_ring(): return [] j=E.j_invariant() if j != F.j_invariant(): if JustOne: return None return [] from sage.rings.all import PolynomialRing x=PolynomialRing(K,'x').gen() a1E, a2E, a3E, a4E, a6E = E.ainvs() a1F, a2F, a3F, a4F, a6F = F.ainvs() char=K.characteristic() if char==2: if j==0: ulist=(x**3-(a3E/a3F)).roots(multiplicities=False) ans=[] for u in ulist: slist=(x**4+a3E*x+(a2F**2+a4F)*u**4+a2E**2+a4E).roots(multiplicities=False) for s in slist: r=s**2+a2E+a2F*u**2 tlist= (x**2 + a3E*x + r**3 + a2E*r**2 + a4E*r + a6E + a6F*u**6).roots(multiplicities=False) for t in tlist: if JustOne: return (u,r,s,t) ans.append((u,r,s,t)) if JustOne: return None ans.sort() return ans else: ans=[] u=a1E/a1F r=(a3E+a3F*u**3)/a1E slist=[s[0] for s in (x**2+a1E*x+(r+a2E+a2F*u**2)).roots()] for s in slist: t = (a4E+a4F*u**4 + s*a3E + r*s*a1E + r**2) if JustOne: return (u,r,s,t) ans.append((u,r,s,t)) if JustOne: return None ans.sort() return ans b2E, b4E, b6E, b8E = E.b_invariants() b2F, b4F, b6F, b8F = F.b_invariants() if char==3: if j==0: ulist=(x**4-(b4E/b4F)).roots(multiplicities=False) ans=[] for u in ulist: s=a1E-a1F*u t=a3E-a3F*u**3 rlist=(x**3-b4E*x+(b6E-b6F*u**6)).roots(multiplicities=False) for r in rlist: if JustOne: return (u,r,s,t+r*a1E) ans.append((u,r,s,t+r*a1E)) if JustOne: return None ans.sort() return ans else: ulist=(x**2-(b2E/b2F)).roots(multiplicities=False) ans=[] 
for u in ulist: r = (b4F*u**4 -b4E)/b2E s = (a1E-a1F*u) t = (a3E-a3F*u**3 + a1E*r) if JustOne: return (u,r,s,t) ans.append((u,r,s,t)) if JustOne: return None ans.sort() return ans # now char!=2,3: c4E,c6E = E.c_invariants() c4F,c6F = F.c_invariants() if j==0: m,um = 6,c6E/c6F elif j==1728: m,um=4,c4E/c4F else: m,um=2,(c6E*c4F)/(c6F*c4E) ulist=(x**m-um).roots(multiplicities=False) ans=[] for u in ulist: s = (a1F*u - a1E)/2 r = (a2F*u**2 + a1E*s + s**2 - a2E)/3 t = (a3F*u**3 - a1E*r - a3E)/2 if JustOne: return (u,r,s,t) ans.append((u,r,s,t)) if JustOne: return None ans.sort() return ans class WeierstrassIsomorphism(baseWI,Morphism): r""" Class representing a Weierstrass isomorphism between two elliptic curves. """ def __init__(self, E=None, urst=None, F=None): r""" Constructor for WeierstrassIsomorphism class, INPUT: - ``E`` -- an EllipticCurve, or None (see below). - ``urst`` -- a 4-tuple `(u,r,s,t)`, or None (see below). - ``F`` -- an EllipticCurve, or None (see below). Given two Elliptic Curves ``E`` and ``F`` (represented by Weierstrass models as usual), and a transformation ``urst`` from ``E`` to ``F``, construct an isomorphism from ``E`` to ``F``. An exception is raised if ``urst(E)!=F``. At most one of ``E``, ``F``, ``urst`` can be None. If ``F==None`` then ``F`` is constructed as ``urst(E)``. If ``E==None`` then ``E`` is constructed as ``urst^-1(F)``. If ``urst==None`` then an isomorphism from ``E`` to ``F`` is constructed if possible, and an exception is raised if they are not isomorphic. Otherwise ``urst`` can be a tuple of length 4 or a object of type ``baseWI``. Users will not usually need to use this class directly, but instead use methods such as ``isomorphism`` of elliptic curves. 
EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: WeierstrassIsomorphism(EllipticCurve([0,1,2,3,4]),(-1,2,3,4)) Generic morphism: From: Abelian group of points on Elliptic Curve defined by y^2 + 2*y = x^3 + x^2 + 3*x + 4 over Rational Field To: Abelian group of points on Elliptic Curve defined by y^2 - 6*x*y - 10*y = x^3 - 2*x^2 - 11*x - 2 over Rational Field Via: (u,r,s,t) = (-1, 2, 3, 4) sage: E=EllipticCurve([0,1,2,3,4]) sage: F=EllipticCurve(E.cremona_label()) sage: WeierstrassIsomorphism(E,None,F) Generic morphism: From: Abelian group of points on Elliptic Curve defined by y^2 + 2*y = x^3 + x^2 + 3*x + 4 over Rational Field To: Abelian group of points on Elliptic Curve defined by y^2 = x^3 + x^2 + 3*x + 5 over Rational Field Via: (u,r,s,t) = (1, 0, 0, -1) sage: w=WeierstrassIsomorphism(None,(1,0,0,-1),F) sage: w._domain_curve==E True """ from ell_generic import is_EllipticCurve if E!=None: if not is_EllipticCurve(E): raise ValueError("First argument must be an elliptic curve or None") if F!=None: if not is_EllipticCurve(F): raise ValueError("Third argument must be an elliptic curve or None") if urst!=None: if len(urst)!=4: raise ValueError("Second argument must be [u,r,s,t] or None") if len([par for par in [E,urst,F] if par!=None])<2: raise ValueError("At most 1 argument can be None") if F==None: # easy case baseWI.__init__(self,*urst) F=EllipticCurve(baseWI.__call__(self,list(E.a_invariants()))) Morphism.__init__(self, Hom(E(0).parent(), F(0).parent())) self._domain_curve = E self._codomain_curve = F return if E==None: # easy case in reverse baseWI.__init__(self,*urst) inv_urst=baseWI.__invert__(self) E=EllipticCurve(baseWI.__call__(inv_urst,list(F.a_invariants()))) Morphism.__init__(self, Hom(E(0).parent(), F(0).parent())) self._domain_curve = E self._codomain_curve = F return if urst==None: # try to construct the morphism urst=isomorphisms(E,F,True) if urst==None: raise ValueError("Elliptic curves not isomorphic.") 
baseWI.__init__(self, *urst) Morphism.__init__(self, Hom(E(0).parent(), F(0).parent())) self._domain_curve = E self._codomain_curve = F return # none of the parameters is None: baseWI.__init__(self,*urst) if F!=EllipticCurve(baseWI.__call__(self,list(E.a_invariants()))): raise ValueError("second argument is not an isomorphism from first argument to third argument") else: Morphism.__init__(self, Hom(E(0).parent(), F(0).parent())) self._domain_curve = E self._codomain_curve = F return def __cmp__(self, other): r""" Standard comparison function for the WeierstrassIsomorphism class. EXAMPLE:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: E=EllipticCurve('389a1') sage: F=E.change_weierstrass_model(1,2,3,4) sage: w1=E.isomorphism_to(F) sage: w1==w1 True sage: w2 = F.automorphisms()[0] *w1 sage: w1==w2 False :: sage: E=EllipticCurve_from_j(GF(7)(0)) sage: F=E.change_weierstrass_model(2,3,4,5) sage: a=E.isomorphisms(F) sage: b=[w*a[0] for w in F.automorphisms()] sage: b.sort() sage: a==b True sage: c=[a[0]*w for w in E.automorphisms()] sage: c.sort() sage: a==c True """ if not isinstance(other, WeierstrassIsomorphism): return cmp(type(self), type(other)) t = cmp(self._domain_curve, other._domain_curve) if t: return t t = cmp(self._codomain_curve, other._codomain_curve) if t: return t return baseWI.__cmp__(self,other) def __call__(self, P): r""" Call function for WeierstrassIsomorphism class. INPUT: - ``P`` (Point) -- a point on the domain curve. OUTPUT: (Point) the transformed point on the codomain curve. 
EXAMPLES:: sage: from sage.schemes.elliptic_curves.weierstrass_morphism import * sage: E=EllipticCurve('37a1') sage: w=WeierstrassIsomorphism(E,(2,3,4,5)) sage: P=E(0,-1) sage: w(P) (-3/4 : 3/4 : 1) sage: w(P).curve()==E.change_weierstrass_model((2,3,4,5)) True """ if P[2] == 0: return self._codomain_curve(0) else: return self._codomain_curve.point(baseWI.__call__(self,tuple(P._coords)), check=False) def __invert__(self): r""" Returns the inverse of this WeierstrassIsomorphism. EXAMPLES:: sage: E = EllipticCurve('5077') sage: F = E.change_weierstrass_model([2,3,4,5]); F Elliptic Curve defined by y^2 + 4*x*y + 11/8*y = x^3 - 7/4*x^2 - 3/2*x - 9/32 over Rational Field sage: w = E.isomorphism_to(F) sage: P = E(-2,3,1) sage: w(P) (-5/4 : 9/4 : 1) sage: ~w Generic morphism: From: Abelian group of points on Elliptic Curve defined by y^2 + 4*x*y + 11/8*y = x^3 - 7/4*x^2 - 3/2*x - 9/32 over Rational Field To: Abelian group of points on Elliptic Curve defined by y^2 + y = x^3 - 7*x + 6 over Rational Field Via: (u,r,s,t) = (1/2, -3/4, -2, 7/8) sage: Q = w(P); Q (-5/4 : 9/4 : 1) sage: (~w)(Q) (-2 : 3 : 1) """ winv=baseWI.__invert__(self).tuple() return WeierstrassIsomorphism(self._codomain_curve, winv, self._domain_curve) def __mul__(self,other): r""" Returns the composition of this WeierstrassIsomorphism and the other, WeierstrassMorphisms can be composed using ``*`` if the codomain & domain match: `(w1*w2)(X)=w1(w2(X))`, so we require ``w1.domain()==w2.codomain()``. 
EXAMPLES:: sage: E1 = EllipticCurve('5077') sage: E2 = E1.change_weierstrass_model([2,3,4,5]) sage: w1 = E1.isomorphism_to(E2) sage: E3 = E2.change_weierstrass_model([6,7,8,9]) sage: w2 = E2.isomorphism_to(E3) sage: P = E1(-2,3,1) sage: (w2*w1)(P)==w2(w1(P)) True """ if self._domain_curve==other._codomain_curve: w=baseWI.__mul__(self,other) return WeierstrassIsomorphism(other._domain_curve, w.tuple(), self._codomain_curve) else: raise ValueError("Domain of first argument must equal codomain of second") def __repr__(self): r""" Returns the string representation of this WeierstrassIsomorphism. OUTPUT: (string) The underlying morphism, together with an extra line showing the `(u,r,s,t)` parameters. EXAMPLES:: sage: E1 = EllipticCurve('5077') sage: E2 = E1.change_weierstrass_model([2,3,4,5]) sage: E1.isomorphism_to(E2) Generic morphism: From: Abelian group of points on Elliptic Curve defined by y^2 + y = x^3 - 7*x + 6 over Rational Field To: Abelian group of points on Elliptic Curve defined by y^2 + 4*x*y + 11/8*y = x^3 - 7/4*x^2 - 3/2*x - 9/32 over Rational Field Via: (u,r,s,t) = (2, 3, 4, 5) """ return Morphism.__repr__(self)+"\n Via: (u,r,s,t) = "+baseWI.__repr__(self)
33.964052
150
0.531415
from sage.categories.morphism import Morphism from constructor import EllipticCurve from sage.categories.homset import Hom class baseWI: def __init__(self, u=1, r=0, s=0, t=0): if u==0: raise ValueError("u!=0 required for baseWI") self.u=u; self.r=r; self.s=s; self.t=t def __cmp__(self, other): if not isinstance(other, baseWI): return cmp(type(self), type(other)) return cmp(self.tuple(), other.tuple()) def tuple(self): return (self.u,self.r,self.s,self.t) def __mul__(self, other): u1,r1,s1,t1=other.tuple() u2,r2,s2,t2=self.tuple() return baseWI(u1*u2,(u1**2)*r2+r1,u1*s2+s1,(u1**3)*t2+s1*(u1**2)*r2+t1) def __invert__(self): u,r,s,t=self.tuple() return baseWI(1/u,-r/(u**2),-s/u,(r*s-t)/(u**3)) def __repr__(self): return self.tuple().__repr__() def is_identity(self): return self.tuple()==(1,0,0,0) def __call__(self, EorP): u,r,s,t=self.tuple() if len(EorP)==5: a1,a2,a3,a4,a6=EorP a6 += r*(a4 + r*(a2 + r)) - t*(a3 + r*a1 + t); a4 += -s*a3 + 2*r*a2 - (t + r*s)*a1 + 3*r*r - 2*s*t; a3 += r*a1 +t+t; a2 += -s*a1 + 3*r - s*s; a1 += 2*s; return [a1/u,a2/u**2,a3/u**3,a4/u**4,a6/u**6] if len(EorP)==2: x,y=EorP x-=r y-=(s*x+t) return [x/u**2,y/u**3] if len(EorP)==3: x,y,z=EorP x-=r*z y-=(s*x+t*z) return [x/u**2,y/u**3,z] raise ValueError("baseWI(a) only for a=(x,y), (x:y:z) or (a1,a2,a3,a4,a6)") def isomorphisms(E,F,JustOne=False): from ell_generic import is_EllipticCurve if not is_EllipticCurve(E) or not is_EllipticCurve(F): raise ValueError("arguments are not elliptic curves") K = E.base_ring() j=E.j_invariant() if j != F.j_invariant(): if JustOne: return None return [] from sage.rings.all import PolynomialRing x=PolynomialRing(K,'x').gen() a1E, a2E, a3E, a4E, a6E = E.ainvs() a1F, a2F, a3F, a4F, a6F = F.ainvs() char=K.characteristic() if char==2: if j==0: ulist=(x**3-(a3E/a3F)).roots(multiplicities=False) ans=[] for u in ulist: slist=(x**4+a3E*x+(a2F**2+a4F)*u**4+a2E**2+a4E).roots(multiplicities=False) for s in slist: r=s**2+a2E+a2F*u**2 tlist= (x**2 + a3E*x + r**3 + a2E*r**2 
+ a4E*r + a6E + a6F*u**6).roots(multiplicities=False) for t in tlist: if JustOne: return (u,r,s,t) ans.append((u,r,s,t)) if JustOne: return None ans.sort() return ans else: ans=[] u=a1E/a1F r=(a3E+a3F*u**3)/a1E slist=[s[0] for s in (x**2+a1E*x+(r+a2E+a2F*u**2)).roots()] for s in slist: t = (a4E+a4F*u**4 + s*a3E + r*s*a1E + r**2) if JustOne: return (u,r,s,t) ans.append((u,r,s,t)) if JustOne: return None ans.sort() return ans b2E, b4E, b6E, b8E = E.b_invariants() b2F, b4F, b6F, b8F = F.b_invariants() if char==3: if j==0: ulist=(x**4-(b4E/b4F)).roots(multiplicities=False) ans=[] for u in ulist: s=a1E-a1F*u t=a3E-a3F*u**3 rlist=(x**3-b4E*x+(b6E-b6F*u**6)).roots(multiplicities=False) for r in rlist: if JustOne: return (u,r,s,t+r*a1E) ans.append((u,r,s,t+r*a1E)) if JustOne: return None ans.sort() return ans else: ulist=(x**2-(b2E/b2F)).roots(multiplicities=False) ans=[] for u in ulist: r = (b4F*u**4 -b4E)/b2E s = (a1E-a1F*u) t = (a3E-a3F*u**3 + a1E*r) if JustOne: return (u,r,s,t) ans.append((u,r,s,t)) if JustOne: return None ans.sort() return ans c4E,c6E = E.c_invariants() c4F,c6F = F.c_invariants() if j==0: m,um = 6,c6E/c6F elif j==1728: m,um=4,c4E/c4F else: m,um=2,(c6E*c4F)/(c6F*c4E) ulist=(x**m-um).roots(multiplicities=False) ans=[] for u in ulist: s = (a1F*u - a1E)/2 r = (a2F*u**2 + a1E*s + s**2 - a2E)/3 t = (a3F*u**3 - a1E*r - a3E)/2 if JustOne: return (u,r,s,t) ans.append((u,r,s,t)) if JustOne: return None ans.sort() return ans class WeierstrassIsomorphism(baseWI,Morphism): def __init__(self, E=None, urst=None, F=None): from ell_generic import is_EllipticCurve if E!=None: if not is_EllipticCurve(E): raise ValueError("First argument must be an elliptic curve or None") if F!=None: if not is_EllipticCurve(F): raise ValueError("Third argument must be an elliptic curve or None") if urst!=None: if len(urst)!=4: raise ValueError("Second argument must be [u,r,s,t] or None") if len([par for par in [E,urst,F] if par!=None])<2: raise ValueError("At most 1 argument can be 
None") if F==None: baseWI.__init__(self,*urst) F=EllipticCurve(baseWI.__call__(self,list(E.a_invariants()))) Morphism.__init__(self, Hom(E(0).parent(), F(0).parent())) self._domain_curve = E self._codomain_curve = F return if E==None: baseWI.__init__(self,*urst) inv_urst=baseWI.__invert__(self) E=EllipticCurve(baseWI.__call__(inv_urst,list(F.a_invariants()))) Morphism.__init__(self, Hom(E(0).parent(), F(0).parent())) self._domain_curve = E self._codomain_curve = F return if urst==None: urst=isomorphisms(E,F,True) if urst==None: raise ValueError("Elliptic curves not isomorphic.") baseWI.__init__(self, *urst) Morphism.__init__(self, Hom(E(0).parent(), F(0).parent())) self._domain_curve = E self._codomain_curve = F return baseWI.__init__(self,*urst) if F!=EllipticCurve(baseWI.__call__(self,list(E.a_invariants()))): raise ValueError("second argument is not an isomorphism from first argument to third argument") else: Morphism.__init__(self, Hom(E(0).parent(), F(0).parent())) self._domain_curve = E self._codomain_curve = F return def __cmp__(self, other): if not isinstance(other, WeierstrassIsomorphism): return cmp(type(self), type(other)) t = cmp(self._domain_curve, other._domain_curve) if t: return t t = cmp(self._codomain_curve, other._codomain_curve) if t: return t return baseWI.__cmp__(self,other) def __call__(self, P): if P[2] == 0: return self._codomain_curve(0) else: return self._codomain_curve.point(baseWI.__call__(self,tuple(P._coords)), check=False) def __invert__(self): winv=baseWI.__invert__(self).tuple() return WeierstrassIsomorphism(self._codomain_curve, winv, self._domain_curve) def __mul__(self,other): if self._domain_curve==other._codomain_curve: w=baseWI.__mul__(self,other) return WeierstrassIsomorphism(other._domain_curve, w.tuple(), self._codomain_curve) else: raise ValueError("Domain of first argument must equal codomain of second") def __repr__(self): return Morphism.__repr__(self)+"\n Via: (u,r,s,t) = "+baseWI.__repr__(self)
true
true
f703de5e7d7acf281786e68b60e43bb7dc901b58
3,336
py
Python
conf/configs/deploy.py
HeliumEdu/template-project
f5da3f35d6aa4ba4c3d162e59d195720e8c2101b
[ "MIT" ]
2
2018-12-22T19:09:04.000Z
2019-01-23T19:09:31.000Z
conf/configs/deploy.py
HeliumEdu/template-project
f5da3f35d6aa4ba4c3d162e59d195720e8c2101b
[ "MIT" ]
20
2019-07-03T20:56:42.000Z
2022-03-02T22:16:29.000Z
conf/configs/deploy.py
HeliumEdu/template-project
f5da3f35d6aa4ba4c3d162e59d195720e8c2101b
[ "MIT" ]
null
null
null
""" Settings specific to prod-like deployable code, reading values from system environment variables. """ import os from conf.configs import common from conf.settings import PROJECT_ID __author__ = "Alex Laird" __copyright__ = "Copyright 2018, Helium Edu" __version__ = "1.1.15" # Define the base working directory of the application BASE_DIR = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..")) # Application definition INSTALLED_APPS = common.INSTALLED_APPS MIDDLEWARE = common.MIDDLEWARE + ( "rollbar.contrib.django.middleware.RollbarNotifierMiddleware", ) TEMPLATES = common.TEMPLATES if common.DEBUG: TEMPLATES[0]["OPTIONS"]["context_processors"] += ( "django.template.context_processors.debug", ) ############################# # Django configuration ############################# # Security SESSION_ENGINE = "django.contrib.sessions.backends.cache" # Logging ROLLBAR = { "access_token": os.environ.get("PLATFORM_ROLLBAR_POST_SERVER_ITEM_ACCESS_TOKEN"), "environment": os.environ.get("ENVIRONMENT"), "branch": "main", "root": BASE_DIR, } if not common.DEBUG: ADMINS = ( (common.PROJECT_NAME, common.ADMIN_EMAIL_ADDRESS), ) MANAGERS = ADMINS LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "standard": { "format": "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s", "datefmt": "%Y-%m-%d %H:%M:%S" }, }, "filters": { "require_debug_false": { "()": "django.utils.log.RequireDebugFalse", } }, "handlers": { "rollbar": { "level": "WARN", "class": "rollbar.logger.RollbarHandler", "filters": ["require_debug_false"], }, "django": { "level": "ERROR", "class": "logging.handlers.RotatingFileHandler", "filename": f"/var/log/{PROJECT_ID}/django.log", "maxBytes": 50000000, "backupCount": 3, "formatter": "standard", }, f"{PROJECT_ID}_app": { "level": "INFO", "class": "logging.handlers.RotatingFileHandler", "filename": f"/var/log/{PROJECT_ID}/app.log", "maxBytes": 50000000, "backupCount": 3, "formatter": "standard", }, }, 
"loggers": { "django.request": { "handlers": ["django", "rollbar"], "level": "ERROR", "propagate": False, }, "{%PROJECT_ID_LOWER%}.app": { "handlers": [f"{PROJECT_ID}_app", "rollbar"], "level": "INFO", }, } } # Cache CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": os.environ.get("{%PROJECT_ID_UPPER%}_REDIS_HOST"), "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", } }, } # Database DATABASES = { "default": { "NAME": os.environ.get("{%PROJECT_ID_UPPER%}_DB_NAME"), "ENGINE": "django.db.backends.mysql", "HOST": os.environ.get("{%PROJECT_ID_UPPER%}_DB_HOST"), "USER": os.environ.get("{%PROJECT_ID_UPPER%}_DB_USER"), "PASSWORD": os.environ.get("{%PROJECT_ID_UPPER%}_DB_PASSWORD"), } }
26.0625
97
0.566847
import os from conf.configs import common from conf.settings import PROJECT_ID __author__ = "Alex Laird" __copyright__ = "Copyright 2018, Helium Edu" __version__ = "1.1.15" BASE_DIR = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..")) INSTALLED_APPS = common.INSTALLED_APPS MIDDLEWARE = common.MIDDLEWARE + ( "rollbar.contrib.django.middleware.RollbarNotifierMiddleware", ) TEMPLATES = common.TEMPLATES if common.DEBUG: TEMPLATES[0]["OPTIONS"]["context_processors"] += ( "django.template.context_processors.debug", ) SESSION_ENGINE = "django.contrib.sessions.backends.cache" ROLLBAR = { "access_token": os.environ.get("PLATFORM_ROLLBAR_POST_SERVER_ITEM_ACCESS_TOKEN"), "environment": os.environ.get("ENVIRONMENT"), "branch": "main", "root": BASE_DIR, } if not common.DEBUG: ADMINS = ( (common.PROJECT_NAME, common.ADMIN_EMAIL_ADDRESS), ) MANAGERS = ADMINS LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "standard": { "format": "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s", "datefmt": "%Y-%m-%d %H:%M:%S" }, }, "filters": { "require_debug_false": { "()": "django.utils.log.RequireDebugFalse", } }, "handlers": { "rollbar": { "level": "WARN", "class": "rollbar.logger.RollbarHandler", "filters": ["require_debug_false"], }, "django": { "level": "ERROR", "class": "logging.handlers.RotatingFileHandler", "filename": f"/var/log/{PROJECT_ID}/django.log", "maxBytes": 50000000, "backupCount": 3, "formatter": "standard", }, f"{PROJECT_ID}_app": { "level": "INFO", "class": "logging.handlers.RotatingFileHandler", "filename": f"/var/log/{PROJECT_ID}/app.log", "maxBytes": 50000000, "backupCount": 3, "formatter": "standard", }, }, "loggers": { "django.request": { "handlers": ["django", "rollbar"], "level": "ERROR", "propagate": False, }, "{%PROJECT_ID_LOWER%}.app": { "handlers": [f"{PROJECT_ID}_app", "rollbar"], "level": "INFO", }, } } CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", "LOCATION": 
os.environ.get("{%PROJECT_ID_UPPER%}_REDIS_HOST"), "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", } }, } DATABASES = { "default": { "NAME": os.environ.get("{%PROJECT_ID_UPPER%}_DB_NAME"), "ENGINE": "django.db.backends.mysql", "HOST": os.environ.get("{%PROJECT_ID_UPPER%}_DB_HOST"), "USER": os.environ.get("{%PROJECT_ID_UPPER%}_DB_USER"), "PASSWORD": os.environ.get("{%PROJECT_ID_UPPER%}_DB_PASSWORD"), } }
true
true
f703dec0139b9f6b76143414797f05175d6cbaa4
7,986
py
Python
recipes/views.py
4dragunov/foodgram-project
7a5691522047fe6715e1e560c17dcf77852558fc
[ "MIT" ]
null
null
null
recipes/views.py
4dragunov/foodgram-project
7a5691522047fe6715e1e560c17dcf77852558fc
[ "MIT" ]
null
null
null
recipes/views.py
4dragunov/foodgram-project
7a5691522047fe6715e1e560c17dcf77852558fc
[ "MIT" ]
null
null
null
import operator from functools import reduce from django.contrib.auth import get_user_model from django.contrib.auth.decorators import login_required from django.db.models import Q, Sum from django.shortcuts import HttpResponse, get_object_or_404, redirect, render from django.views.generic import View from django.views.generic.base import TemplateView from .forms import RecipeForm from .models import (Purchase, Recipe, Subscription) from .utils import paginator_data User = get_user_model() def index(request): '''Вьюха отображения главной страницы''' # получаем список тегов из GET запроса tags = request.GET.getlist('tag') if tags: # фильтрация по совокупности выбранных тегов query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags)) recipies = Recipe.objects.filter(query).order_by('-date_pub') else: recipies = Recipe.objects.all().order_by('-date_pub') # Т.к. паджинатор есть почти на каждой странице - вынес некоторые моменты # в отдельную функцию в utils.py page, paginator = paginator_data(request, recipies) return render(request, 'index.html', context={'page': page, 'paginator': paginator, 'tags': tags}) def recipe_detail(request, slug): '''Вьюха отображения страницы рецепта''' recipe = get_object_or_404(Recipe, slug__iexact=slug) return render(request, 'recipe_detail.html', context={'recipe': recipe}) def profile_index(request, username): '''Персональная страница пользователя''' author = get_object_or_404(User, username=username) user = request.user tags = request.GET.getlist('tag') if tags: # фильтрация по совокупности выбранных тегов query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags)) recipies = author.recipes.filter(query).order_by('-date_pub') else: recipies = author.recipes.all().order_by('-date_pub') following = Subscription.objects.filter(user__username=user, author=author).count() return render(request, 'profile.html', context={'recipies': recipies, 'author': author, 'user': user, 'following': following, 'tags': tags}) 
@login_required def subscription_index(request): '''Страница подписок пользователя''' follow_authors = User.objects.filter( following__user=request.user).prefetch_related('recipes') page, paginator = paginator_data(request, follow_authors) return render(request, 'subscription_index.html', context={'page': page, 'paginator': paginator, }) @login_required def favorite_index(request): '''Страница подписок пользователя''' tags = request.GET.getlist('tag') if tags: # фильтрация по совокупности выбранных тегов query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags)) recipies = Recipe.objects.filter(query).order_by('-date_pub').filter( favorites__user=request.user).select_related('author') else: recipies = Recipe.objects.all().order_by('-date_pub').filter( favorites__user=request.user).select_related('author') page, paginator = paginator_data(request, recipies) return render(request, 'favorite_index.html', context={'page': page, 'paginator': paginator, 'tags': tags}) @login_required def purchase_index(request): '''Список покупок''' recipies = Recipe.objects.filter( purchases__user=request.user) return render(request, 'purchase_index.html', context={ 'recipies': recipies}) @login_required def get_purchase_list(request): '''Загрузка txt файла со списком ингридиентов выбранных рецептов''' file_name = 'Purchase_list.txt' txt = '' purchase = Purchase.objects.filter(user=request.user) ingredients = purchase.values('recipe__ingredients__title', 'recipe__ingredients__dimension').annotate( total_amount=Sum('recipe__ingredients__ingredient_recipe__amount' '')) result = set() for ingredient in ingredients: if ingredient['recipe__ingredients__title'] not in result: item = (f'{ingredient["recipe__ingredients__title"]} ' f'{ingredient["total_amount"]} ' f'{ingredient["recipe__ingredients__dimension"]}' ) result.add(ingredient['recipe__ingredients__title']) txt += item + '\n' response = HttpResponse(txt, content_type='application/text charset=utf-8') 
response['Content-Disposition'] = f'attachment; filename={file_name}' return response class RecipeCreateUpdate(View): '''Создание или редактирование рецепта''' def get(self, request, slug=None): if slug: recipe = get_object_or_404(Recipe, author__username=(self.request. user.username), slug__iexact=slug) form = RecipeForm(instance=recipe) title = 'Редактирование рецепта' botton_name = 'Изменить рецепт' context = { 'form': form, 'botton_name': botton_name, 'title': title, 'recipe': recipe, } else: form = RecipeForm() title = 'Создание рецепта' botton_name = 'Создать рецепт' context = { 'form': form, 'botton_name': botton_name, 'title': title } template = 'recipe_create_or_update.html' return render(request, template, context) def post(self, request, slug=None): if slug: recipe = get_object_or_404(Recipe, author__username=(self.request. user.username), slug__iexact=slug) if request.user != recipe.author: return redirect('index') bound_form = RecipeForm(request.POST or None, files=request.FILES or None, instance=recipe, initial={"request": request}) context = { 'form': bound_form, 'title': 'Редактирование рецепта', 'botton_name': 'Редактирование рецепта', 'recipe': recipe } else: bound_form = RecipeForm(request.POST or None, files=request.FILES or None, initial={"request": request}) context = { 'form': bound_form, 'title': 'Создание рецепта', 'botton_name': 'Создать рецепт' } if bound_form.is_valid(): new_recipe = bound_form.save(commit=False) new_recipe.tags = request.POST.getlist('tags') return redirect(new_recipe) return render(request, 'recipe_create_or_update.html', context=context) class RecipeDelete(View): '''Удаление рецепта''' def get(self, request, pk): recipe = get_object_or_404(Recipe, author=request.user, id=pk) recipe.delete() return redirect('index') class About(TemplateView): '''Об авторе''' template_name = 'about.html' class Technologies(TemplateView): '''Технологии''' template_name = 'technologies.html'
36.972222
79
0.576258
import operator from functools import reduce from django.contrib.auth import get_user_model from django.contrib.auth.decorators import login_required from django.db.models import Q, Sum from django.shortcuts import HttpResponse, get_object_or_404, redirect, render from django.views.generic import View from django.views.generic.base import TemplateView from .forms import RecipeForm from .models import (Purchase, Recipe, Subscription) from .utils import paginator_data User = get_user_model() def index(request): tags = request.GET.getlist('tag') if tags: query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags)) recipies = Recipe.objects.filter(query).order_by('-date_pub') else: recipies = Recipe.objects.all().order_by('-date_pub') page, paginator = paginator_data(request, recipies) return render(request, 'index.html', context={'page': page, 'paginator': paginator, 'tags': tags}) def recipe_detail(request, slug): recipe = get_object_or_404(Recipe, slug__iexact=slug) return render(request, 'recipe_detail.html', context={'recipe': recipe}) def profile_index(request, username): author = get_object_or_404(User, username=username) user = request.user tags = request.GET.getlist('tag') if tags: query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags)) recipies = author.recipes.filter(query).order_by('-date_pub') else: recipies = author.recipes.all().order_by('-date_pub') following = Subscription.objects.filter(user__username=user, author=author).count() return render(request, 'profile.html', context={'recipies': recipies, 'author': author, 'user': user, 'following': following, 'tags': tags}) @login_required def subscription_index(request): follow_authors = User.objects.filter( following__user=request.user).prefetch_related('recipes') page, paginator = paginator_data(request, follow_authors) return render(request, 'subscription_index.html', context={'page': page, 'paginator': paginator, }) @login_required def favorite_index(request): tags = 
request.GET.getlist('tag') if tags: query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags)) recipies = Recipe.objects.filter(query).order_by('-date_pub').filter( favorites__user=request.user).select_related('author') else: recipies = Recipe.objects.all().order_by('-date_pub').filter( favorites__user=request.user).select_related('author') page, paginator = paginator_data(request, recipies) return render(request, 'favorite_index.html', context={'page': page, 'paginator': paginator, 'tags': tags}) @login_required def purchase_index(request): recipies = Recipe.objects.filter( purchases__user=request.user) return render(request, 'purchase_index.html', context={ 'recipies': recipies}) @login_required def get_purchase_list(request): file_name = 'Purchase_list.txt' txt = '' purchase = Purchase.objects.filter(user=request.user) ingredients = purchase.values('recipe__ingredients__title', 'recipe__ingredients__dimension').annotate( total_amount=Sum('recipe__ingredients__ingredient_recipe__amount' '')) result = set() for ingredient in ingredients: if ingredient['recipe__ingredients__title'] not in result: item = (f'{ingredient["recipe__ingredients__title"]} ' f'{ingredient["total_amount"]} ' f'{ingredient["recipe__ingredients__dimension"]}' ) result.add(ingredient['recipe__ingredients__title']) txt += item + '\n' response = HttpResponse(txt, content_type='application/text charset=utf-8') response['Content-Disposition'] = f'attachment; filename={file_name}' return response class RecipeCreateUpdate(View): def get(self, request, slug=None): if slug: recipe = get_object_or_404(Recipe, author__username=(self.request. 
user.username), slug__iexact=slug) form = RecipeForm(instance=recipe) title = 'Редактирование рецепта' botton_name = 'Изменить рецепт' context = { 'form': form, 'botton_name': botton_name, 'title': title, 'recipe': recipe, } else: form = RecipeForm() title = 'Создание рецепта' botton_name = 'Создать рецепт' context = { 'form': form, 'botton_name': botton_name, 'title': title } template = 'recipe_create_or_update.html' return render(request, template, context) def post(self, request, slug=None): if slug: recipe = get_object_or_404(Recipe, author__username=(self.request. user.username), slug__iexact=slug) if request.user != recipe.author: return redirect('index') bound_form = RecipeForm(request.POST or None, files=request.FILES or None, instance=recipe, initial={"request": request}) context = { 'form': bound_form, 'title': 'Редактирование рецепта', 'botton_name': 'Редактирование рецепта', 'recipe': recipe } else: bound_form = RecipeForm(request.POST or None, files=request.FILES or None, initial={"request": request}) context = { 'form': bound_form, 'title': 'Создание рецепта', 'botton_name': 'Создать рецепт' } if bound_form.is_valid(): new_recipe = bound_form.save(commit=False) new_recipe.tags = request.POST.getlist('tags') return redirect(new_recipe) return render(request, 'recipe_create_or_update.html', context=context) class RecipeDelete(View): def get(self, request, pk): recipe = get_object_or_404(Recipe, author=request.user, id=pk) recipe.delete() return redirect('index') class About(TemplateView): template_name = 'about.html' class Technologies(TemplateView): template_name = 'technologies.html'
true
true
f703df5f0c91bc68a4c4c50b7014ee977351bac8
4,958
py
Python
backend/services/toxic_comment_jigsaw/application/ai/training/src/train.py
R-aryan/Jigsaw-Toxic-Comment-Classification
e5e4da7df379ac1b315f2bde655386180f39c517
[ "MIT" ]
null
null
null
backend/services/toxic_comment_jigsaw/application/ai/training/src/train.py
R-aryan/Jigsaw-Toxic-Comment-Classification
e5e4da7df379ac1b315f2bde655386180f39c517
[ "MIT" ]
1
2021-07-08T14:57:25.000Z
2021-07-08T14:57:25.000Z
backend/services/toxic_comment_jigsaw/application/ai/training/src/train.py
R-aryan/Jigsaw-Toxic-Comment-Classification
e5e4da7df379ac1b315f2bde655386180f39c517
[ "MIT" ]
null
null
null
import pandas as pd import numpy as np import torch from sklearn.model_selection import train_test_split from backend.services.toxic_comment_jigsaw.application.ai.model import BERTClassifier from backend.services.toxic_comment_jigsaw.application.ai.training.src.dataset import BERTDataset from backend.services.toxic_comment_jigsaw.application.ai.training.src.preprocess import Preprocess from backend.services.toxic_comment_jigsaw.application.ai.training.src.engine import Engine from backend.services.toxic_comment_jigsaw.application.ai.settings import Settings from transformers import AdamW, get_linear_schedule_with_warmup from torch.utils.data import DataLoader class Train: def __init__(self): # initialize required class self.settings = Settings self.engine = Engine() self.preprocess = Preprocess() # initialize required variables self.bert_classifier = None self.optimizer = None self.scheduler = None self.train_data_loader = None self.val_data_loader = None self.total_steps = None self.best_accuracy = 0 def __initialize(self): # Instantiate Bert Classifier self.bert_classifier = BERTClassifier(freeze_bert=False) self.bert_classifier.to(self.settings.DEVICE) # Create the optimizer self.optimizer = AdamW(self.bert_classifier.parameters(), lr=5e-5, # Default learning rate eps=1e-8 # Default epsilon value ) # Set up the learning rate scheduler self.scheduler = get_linear_schedule_with_warmup(self.optimizer, num_warmup_steps=0, # Default value num_training_steps=self.total_steps) def crete_data_loaders(self, dataset): pass def load_data(self): train_df = pd.read_csv(self.settings.TRAIN_DATA).fillna("none") train_df['comment_text'] = train_df['comment_text'].apply(lambda x: self.preprocess.clean_text(x)) X = list(train_df['comment_text']) y = np.array(train_df.loc[:, 'toxic':]) X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.20, random_state=self.settings.RANDOM_STATE) # training dataset train_dataset = BERTDataset(X_train, y_train) # validation dataset 
val_dataset = BERTDataset(X_val, y_val) self.train_data_loader = DataLoader(train_dataset, batch_size=self.settings.TRAIN_BATCH_SIZE, shuffle=True, num_workers=self.settings.TRAIN_NUM_WORKERS) self.val_data_loader = DataLoader(val_dataset, batch_size=self.settings.VALID_BATCH_SIZE, shuffle=True, num_workers=self.settings.VAL_NUM_WORKERS) self.total_steps = int(len(X_train) / self.settings.TRAIN_BATCH_SIZE * self.settings.EPOCHS) def train(self): for epochs in range(self.settings.EPOCHS): # calling the training function in engine.py file self.engine.train_fn(data_loader=self.train_data_loader, model=self.bert_classifier, optimizer=self.optimizer, device=self.settings.DEVICE, schedular=self.scheduler) # calling the evaluation function from the engine.py file to compute evaluation val_loss, val_accuracy = self.engine.eval_fn(data_loader=self.val_data_loader, model=self.bert_classifier, device=self.settings.DEVICE) # updating the accuracy if val_accuracy > self.best_accuracy: torch.save(self.bert_classifier.state_dict(), self.settings.MODEL_PATH) self.best_accuracy = val_accuracy def run(self): try: print("Loading and Preparing the Dataset-----!! ") self.load_data() print("Dataset Successfully Loaded and Prepared-----!! ") print() print("-" * 70) print("Loading and Initializing the Bert Model -----!! ") self.__initialize() print("Model Successfully Loaded and Initialized-----!! ") print() print("-" * 70) print("------------------Starting Training-----------!!") self.engine.set_seed() self.train() print("Training complete-----!!!") except BaseException as ex: print("Following Exception Occurred---!! ", str(ex))
42.741379
120
0.587535
import pandas as pd import numpy as np import torch from sklearn.model_selection import train_test_split from backend.services.toxic_comment_jigsaw.application.ai.model import BERTClassifier from backend.services.toxic_comment_jigsaw.application.ai.training.src.dataset import BERTDataset from backend.services.toxic_comment_jigsaw.application.ai.training.src.preprocess import Preprocess from backend.services.toxic_comment_jigsaw.application.ai.training.src.engine import Engine from backend.services.toxic_comment_jigsaw.application.ai.settings import Settings from transformers import AdamW, get_linear_schedule_with_warmup from torch.utils.data import DataLoader class Train: def __init__(self): self.settings = Settings self.engine = Engine() self.preprocess = Preprocess() self.bert_classifier = None self.optimizer = None self.scheduler = None self.train_data_loader = None self.val_data_loader = None self.total_steps = None self.best_accuracy = 0 def __initialize(self): self.bert_classifier = BERTClassifier(freeze_bert=False) self.bert_classifier.to(self.settings.DEVICE) self.optimizer = AdamW(self.bert_classifier.parameters(), lr=5e-5, eps=1e-8 ) self.scheduler = get_linear_schedule_with_warmup(self.optimizer, num_warmup_steps=0, num_training_steps=self.total_steps) def crete_data_loaders(self, dataset): pass def load_data(self): train_df = pd.read_csv(self.settings.TRAIN_DATA).fillna("none") train_df['comment_text'] = train_df['comment_text'].apply(lambda x: self.preprocess.clean_text(x)) X = list(train_df['comment_text']) y = np.array(train_df.loc[:, 'toxic':]) X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.20, random_state=self.settings.RANDOM_STATE) train_dataset = BERTDataset(X_train, y_train) val_dataset = BERTDataset(X_val, y_val) self.train_data_loader = DataLoader(train_dataset, batch_size=self.settings.TRAIN_BATCH_SIZE, shuffle=True, num_workers=self.settings.TRAIN_NUM_WORKERS) self.val_data_loader = DataLoader(val_dataset, 
batch_size=self.settings.VALID_BATCH_SIZE, shuffle=True, num_workers=self.settings.VAL_NUM_WORKERS) self.total_steps = int(len(X_train) / self.settings.TRAIN_BATCH_SIZE * self.settings.EPOCHS) def train(self): for epochs in range(self.settings.EPOCHS): self.engine.train_fn(data_loader=self.train_data_loader, model=self.bert_classifier, optimizer=self.optimizer, device=self.settings.DEVICE, schedular=self.scheduler) val_loss, val_accuracy = self.engine.eval_fn(data_loader=self.val_data_loader, model=self.bert_classifier, device=self.settings.DEVICE) if val_accuracy > self.best_accuracy: torch.save(self.bert_classifier.state_dict(), self.settings.MODEL_PATH) self.best_accuracy = val_accuracy def run(self): try: print("Loading and Preparing the Dataset-----!! ") self.load_data() print("Dataset Successfully Loaded and Prepared-----!! ") print() print("-" * 70) print("Loading and Initializing the Bert Model -----!! ") self.__initialize() print("Model Successfully Loaded and Initialized-----!! ") print() print("-" * 70) print("------------------Starting Training-----------!!") self.engine.set_seed() self.train() print("Training complete-----!!!") except BaseException as ex: print("Following Exception Occurred---!! ", str(ex))
true
true
f703dfbac51316faf74f9bc6a25208bbba2a78b2
22,168
py
Python
tests/hacking/checks.py
msimonin/rally-openstack
fc899c7262125e4739139f708a830ef975f386a6
[ "Apache-2.0" ]
null
null
null
tests/hacking/checks.py
msimonin/rally-openstack
fc899c7262125e4739139f708a830ef975f386a6
[ "Apache-2.0" ]
null
null
null
tests/hacking/checks.py
msimonin/rally-openstack
fc899c7262125e4739139f708a830ef975f386a6
[ "Apache-2.0" ]
null
null
null
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Guidelines for writing new hacking checks - Use only for Rally specific tests. OpenStack general tests should be submitted to the common 'hacking' module. - Pick numbers in the range N3xx. Find the current test with the highest allocated number and then pick the next value. - Keep the test method code in the source file ordered based on the N3xx value. - List the new rule in the top level HACKING.rst file - Add test cases for each new rule to tests/unit/test_hacking.py """ import functools import re import tokenize re_assert_equal_end_with_true_or_false = re.compile( r"assertEqual\(.*?, \s+(True|False)\)$") re_assert_equal_start_with_true_or_false = re.compile( r"assertEqual\((True|False),") re_assert_true_instance = re.compile( r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, " r"(\w|\.|\'|\"|\[|\])+\)\)") re_assert_equal_type = re.compile( r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), " r"(\w|\.|\'|\"|\[|\])+\)") re_assert_equal_end_with_none = re.compile(r"assertEqual\(.*?,\s+None\)$") re_assert_equal_start_with_none = re.compile(r"assertEqual\(None,") re_assert_not_equal_end_with_none = re.compile( r"assertNotEqual\(.*?,\s+None\)$") re_assert_not_equal_start_with_none = re.compile(r"assertNotEqual\(None,") re_assert_true_false_with_in_or_not_in = re.compile( r"assert(True|False)\(" r"(\w|[][.'\"])+( not)? 
in (\w|[][.'\",])+(, .*)?\)") re_assert_true_false_with_in_or_not_in_spaces = re.compile( r"assert(True|False)\((\w|[][.'\"])+( not)? in [\[|'|\"](\w|[][.'\", ])+" r"[\[|'|\"](, .*)?\)") re_assert_equal_in_end_with_true_or_false = re.compile( r"assertEqual\((\w|[][.'\"])+( not)? in (\w|[][.'\", ])+, (True|False)\)") re_assert_equal_in_start_with_true_or_false = re.compile( r"assertEqual\((True|False), (\w|[][.'\"])+( not)? in (\w|[][.'\", ])+\)") re_no_construct_dict = re.compile( r"\sdict\(\)") re_no_construct_list = re.compile( r"\slist\(\)") re_str_format = re.compile(r""" % # start of specifier \(([^)]+)\) # mapping key, in group 1 [#0 +\-]? # optional conversion flag (?:-?\d*)? # optional minimum field width (?:\.\d*)? # optional precision [hLl]? # optional length modifier [A-z%] # conversion modifier """, re.X) re_raises = re.compile( r"\s:raise[^s] *.*$|\s:raises *:.*$|\s:raises *[^:]+$") re_db_import = re.compile(r"^from rally.common import db") re_objects_import = re.compile(r"^from rally.common import objects") re_old_type_class = re.compile(r"^\s*class \w+(\(\))?:") re_datetime_alias = re.compile(r"^(from|import) datetime(?!\s+as\s+dt$)") re_log_warn = re.compile(r"(.)*LOG\.(warn)\(\s*('|\"|_)") def skip_ignored_lines(func): @functools.wraps(func) def wrapper(logical_line, physical_line, filename): line = physical_line.strip() if not line or line.startswith("#") or line.endswith("# noqa"): return yield next(func(logical_line, physical_line, filename)) return wrapper def _parse_assert_mock_str(line): point = line.find(".assert_") if point == -1: point = line.find(".called_once_with(") if point != -1: end_pos = line[point:].find("(") + point return point, line[point + 1: end_pos], line[: point] else: return None, None, None @skip_ignored_lines def check_assert_methods_from_mock(logical_line, physical_line, filename): """Ensure that ``assert_*`` methods from ``mock`` library is used correctly N301 - base error number N302 - related to nonexistent 
"assert_called" N303 - related to nonexistent "assert_called_once" N304 - related to nonexistent "called_once_with" """ correct_names = ["assert_any_call", "assert_called_once_with", "assert_called_with", "assert_has_calls", "assert_not_called"] ignored_files = ["./tests/unit/test_hacking.py"] if filename.startswith("./tests") and filename not in ignored_files: pos, method_name, obj_name = _parse_assert_mock_str(logical_line) if pos: if method_name not in correct_names: error_number = "N301" msg = ("%(error_number)s:'%(method)s' is not present in `mock`" " library. %(custom_msg)s For more details, visit " "http://www.voidspace.org.uk/python/mock/ .") if method_name == "assert_called": error_number = "N302" custom_msg = ("Maybe, you should try to use " "'assertTrue(%s.called)' instead." % obj_name) elif method_name == "assert_called_once": # For more details, see a bug in Rally: # https://bugs.launchpad.net/rally/+bug/1305991 error_number = "N303" custom_msg = ("Maybe, you should try to use " "'assertEqual(1, %s.call_count)' " "or '%s.assert_called_once_with()'" " instead." % (obj_name, obj_name)) elif method_name == "called_once_with": error_number = "N304" custom_msg = ("Maybe, you should try to use " "'%s.assert_called_once_with()'" " instead." % obj_name) else: custom_msg = ("Correct 'assert_*' methods: '%s'." 
% "', '".join(correct_names)) yield (pos, msg % { "error_number": error_number, "method": method_name, "custom_msg": custom_msg}) @skip_ignored_lines def check_import_of_logging(logical_line, physical_line, filename): """Check correctness import of logging module N310 """ excluded_files = ["./rally/common/logging.py", "./tests/unit/test_logging.py", "./tests/ci/rally_verify.py", "./tests/ci/sync_requirements.py"] forbidden_imports = ["from oslo_log", "import oslo_log", "import logging"] if filename not in excluded_files: for forbidden_import in forbidden_imports: if logical_line.startswith(forbidden_import): yield (0, "N310 Wrong module for logging is imported. Please " "use `rally.common.logging` instead.") @skip_ignored_lines def check_import_of_config(logical_line, physical_line, filename): """Check correctness import of config module N311 """ excluded_files = ["./rally/common/cfg.py"] forbidden_imports = ["from oslo_config", "import oslo_config"] if filename not in excluded_files: for forbidden_import in forbidden_imports: if logical_line.startswith(forbidden_import): yield (0, "N311 Wrong module for config is imported. Please " "use `rally.common.cfg` instead.") @skip_ignored_lines def no_use_conf_debug_check(logical_line, physical_line, filename): """Check for "cfg.CONF.debug" Rally has two DEBUG level: - Full DEBUG, which include all debug-messages from all OpenStack services - Rally DEBUG, which include only Rally debug-messages so we should use custom check to know debug-mode, instead of CONF.debug N312 """ excluded_files = ["./rally/common/logging.py"] point = logical_line.find("CONF.debug") if point != -1 and filename not in excluded_files: yield(point, "N312 Don't use `CONF.debug`. 
" "Function `rally.common.logging.is_debug` " "should be used instead.") @skip_ignored_lines def assert_true_instance(logical_line, physical_line, filename): """Check for assertTrue(isinstance(a, b)) sentences N320 """ if re_assert_true_instance.match(logical_line): yield (0, "N320 assertTrue(isinstance(a, b)) sentences not allowed, " "you should use assertIsInstance(a, b) instead.") @skip_ignored_lines def assert_equal_type(logical_line, physical_line, filename): """Check for assertEqual(type(A), B) sentences N321 """ if re_assert_equal_type.match(logical_line): yield (0, "N321 assertEqual(type(A), B) sentences not allowed, " "you should use assertIsInstance(a, b) instead.") @skip_ignored_lines def assert_equal_none(logical_line, physical_line, filename): """Check for assertEqual(A, None) or assertEqual(None, A) sentences N322 """ res = (re_assert_equal_start_with_none.search(logical_line) or re_assert_equal_end_with_none.search(logical_line)) if res: yield (0, "N322 assertEqual(A, None) or assertEqual(None, A) " "sentences not allowed, you should use assertIsNone(A) " "instead.") @skip_ignored_lines def assert_true_or_false_with_in(logical_line, physical_line, filename): """Check assertTrue/False(A in/not in B) with collection contents Check for assertTrue/False(A in B), assertTrue/False(A not in B), assertTrue/False(A in B, message) or assertTrue/False(A not in B, message) sentences. 
N323 """ res = (re_assert_true_false_with_in_or_not_in.search(logical_line) or re_assert_true_false_with_in_or_not_in_spaces.search(logical_line)) if res: yield (0, "N323 assertTrue/assertFalse(A in/not in B)sentences not " "allowed, you should use assertIn(A, B) or assertNotIn(A, B)" " instead.") @skip_ignored_lines def assert_equal_in(logical_line, physical_line, filename): """Check assertEqual(A in/not in B, True/False) with collection contents Check for assertEqual(A in B, True/False), assertEqual(True/False, A in B), assertEqual(A not in B, True/False) or assertEqual(True/False, A not in B) sentences. N324 """ res = (re_assert_equal_in_end_with_true_or_false.search(logical_line) or re_assert_equal_in_start_with_true_or_false.search(logical_line)) if res: yield (0, "N324: Use assertIn/NotIn(A, B) rather than " "assertEqual(A in/not in B, True/False) when checking " "collection contents.") @skip_ignored_lines def assert_not_equal_none(logical_line, physical_line, filename): """Check for assertNotEqual(A, None) or assertEqual(None, A) sentences N325 """ res = (re_assert_not_equal_start_with_none.search(logical_line) or re_assert_not_equal_end_with_none.search(logical_line)) if res: yield (0, "N325 assertNotEqual(A, None) or assertNotEqual(None, A) " "sentences not allowed, you should use assertIsNotNone(A) " "instead.") @skip_ignored_lines def assert_equal_true_or_false(logical_line, physical_line, filename): """Check for assertEqual(A, True/False) sentences Check for assertEqual(A, True/False) sentences or assertEqual(True/False, A) N326 """ res = (re_assert_equal_end_with_true_or_false.search(logical_line) or re_assert_equal_start_with_true_or_false.search(logical_line)) if res: yield (0, "N326 assertEqual(A, True/False) or " "assertEqual(True/False, A) sentences not allowed," "you should use assertTrue(A) or assertFalse(A) instead.") @skip_ignored_lines def check_no_direct_rally_objects_import(logical_line, physical_line, filename): """Check if 
rally.common.objects are properly imported. If you import "from rally.common import objects" you are able to use objects directly like objects.Task. N340 """ if filename == "./rally/common/objects/__init__.py": return if filename == "./rally/common/objects/endpoint.py": return if (logical_line.startswith("from rally.common.objects") or logical_line.startswith("import rally.common.objects.")): yield (0, "N340: Import objects module:" "`from rally.common import objects`. " "After that you can use directly objects e.g. objects.Task") @skip_ignored_lines def check_no_oslo_deprecated_import(logical_line, physical_line, filename): """Check if oslo.foo packages are not imported instead of oslo_foo ones. Libraries from oslo.foo namespace are deprecated because of namespace problems. N341 """ if (logical_line.startswith("from oslo.") or logical_line.startswith("import oslo.")): yield (0, "N341: Import oslo module: `from oslo_xyz import ...`. " "The oslo.xyz namespace was deprecated, use oslo_xyz " "instead") @skip_ignored_lines def check_quotes(logical_line, physical_line, filename): """Check that single quotation marks are not used N350 """ in_string = False in_multiline_string = False single_quotas_are_used = False check_tripple = ( lambda line, i, char: ( i + 2 < len(line) and (char == line[i] == line[i + 1] == line[i + 2]) ) ) i = 0 while i < len(logical_line): char = logical_line[i] if in_string: if char == "\"": in_string = False if char == "\\": i += 1 # ignore next char elif in_multiline_string: if check_tripple(logical_line, i, "\""): i += 2 # skip next 2 chars in_multiline_string = False elif char == "#": break elif char == "'": single_quotas_are_used = True break elif char == "\"": if check_tripple(logical_line, i, "\""): in_multiline_string = True i += 3 continue in_string = True i += 1 if single_quotas_are_used: yield (i, "N350 Remove Single quotes") @skip_ignored_lines def check_no_constructor_data_struct(logical_line, physical_line, filename): """Check that 
data structs (lists, dicts) are declared using literals N351 """ match = re_no_construct_dict.search(logical_line) if match: yield (0, "N351 Remove dict() construct and use literal {}") match = re_no_construct_list.search(logical_line) if match: yield (0, "N351 Remove list() construct and use literal []") def check_dict_formatting_in_string(logical_line, tokens): """Check that strings do not use dict-formatting with a single replacement N352 """ # NOTE(stpierre): Can't use @skip_ignored_lines here because it's # a stupid decorator that only works on functions that take # (logical_line, filename) as arguments. if (not logical_line or logical_line.startswith("#") or logical_line.endswith("# noqa")): return current_string = "" in_string = False for token_type, text, start, end, line in tokens: if token_type == tokenize.STRING: if not in_string: current_string = "" in_string = True current_string += text.strip("\"") elif token_type == tokenize.OP: if not current_string: continue # NOTE(stpierre): The string formatting operator % has # lower precedence than +, so we assume that the logical # string has concluded whenever we hit an operator of any # sort. (Most operators don't work for strings anyway.) # Some string operators do have higher precedence than %, # though, so you can technically trick this check by doing # things like: # # "%(foo)s" * 1 % {"foo": 1} # "%(foo)s"[:] % {"foo": 1} # # It also will produce false positives if you use explicit # parenthesized addition for two strings instead of # concatenation by juxtaposition, e.g.: # # ("%(foo)s" + "%(bar)s") % vals # # But if you do any of those things, then you deserve all # of the horrible things that happen to you, and probably # many more. 
in_string = False if text == "%": format_keys = set() for match in re_str_format.finditer(current_string): format_keys.add(match.group(1)) if len(format_keys) == 1: yield (0, "N353 Do not use mapping key string formatting " "with a single key") if text != ")": # NOTE(stpierre): You can have a parenthesized string # followed by %, so a closing paren doesn't obviate # the possibility for a substitution operator like # every other operator does. current_string = "" elif token_type in (tokenize.NL, tokenize.COMMENT): continue else: in_string = False if token_type == tokenize.NEWLINE: current_string = "" @skip_ignored_lines def check_using_unicode(logical_line, physical_line, filename): """Check crosspython unicode usage N353 """ if re.search(r"\bunicode\(", logical_line): yield (0, "N353 'unicode' function is absent in python3. Please " "use 'six.text_type' instead.") def check_raises(physical_line, filename): """Check raises usage N354 """ ignored_files = ["./tests/unit/test_hacking.py", "./tests/hacking/checks.py"] if filename not in ignored_files: if re_raises.search(physical_line): return (0, "N354 ':Please use ':raises Exception: conditions' " "in docstrings.") @skip_ignored_lines def check_old_type_class(logical_line, physical_line, filename): """Use new-style Python classes N355 """ if re_old_type_class.search(logical_line): yield (0, "N355 This class does not inherit from anything and thus " "will be an old-style class by default. 
Try to inherit from " "``object`` or another new-style class.") @skip_ignored_lines def check_datetime_alias(logical_line, physical_line, filename): """Ensure using ``dt`` as alias for ``datetime`` N356 """ if re_datetime_alias.search(logical_line): yield (0, "N356 Please use ``dt`` as alias for ``datetime``.") @skip_ignored_lines def check_no_six_iteritems(logical_line, physical_line, filename): """Check no six.iteritems N357 """ if re.search(r"\six.iteritems\(\)", logical_line): yield (0, "N357 Use dict.items() instead of six.iteritems()") @skip_ignored_lines def check_db_imports_in_cli(logical_line, physical_line, filename): """Ensure that CLI modules do not use ``rally.common.db`` N360 """ if (not filename.startswith("./rally/cli") or filename == "./rally/cli/commands/db.py"): return if re_db_import.search(logical_line): yield (0, "N360 CLI modules do not allow to work with " "`rally.common.db``.") @skip_ignored_lines def check_objects_imports_in_cli(logical_line, physical_line, filename): """Ensure that CLI modules do not use ``rally.common.objects`` N361 """ if not filename.startswith("./rally/cli"): return if re_objects_import.search(logical_line): yield (0, "N361 CLI modules do not allow to work with " "`rally.common.objects``.") @skip_ignored_lines def check_log_warn(logical_line, physical_line, filename): if re_log_warn.search(logical_line): yield(0, "N313 LOG.warn is deprecated, please use LOG.warning") @skip_ignored_lines def check_opts_import_path(logical_line, physical_line, filename): """Ensure that we load opts from correct paths only N342 """ excluded_files = ["./rally_openstack/__init__.py"] forbidden_methods = [".register_opts("] if filename not in excluded_files: for forbidden_method in forbidden_methods: if logical_line.find(forbidden_method) != -1: yield (0, "N342 All options should be loaded from correct " "paths only - rally_openstack.cfg module.") def factory(register): register(check_assert_methods_from_mock) 
register(check_import_of_logging) register(check_import_of_config) register(no_use_conf_debug_check) register(assert_true_instance) register(assert_equal_type) register(assert_equal_none) register(assert_true_or_false_with_in) register(assert_equal_in) register(assert_equal_true_or_false) register(check_no_direct_rally_objects_import) register(check_no_oslo_deprecated_import) register(check_quotes) register(check_no_constructor_data_struct) register(check_dict_formatting_in_string) register(check_using_unicode) register(check_raises) register(check_datetime_alias) register(check_db_imports_in_cli) register(check_objects_imports_in_cli) register(check_old_type_class) register(check_no_six_iteritems) register(check_log_warn) register(check_opts_import_path)
35.243243
79
0.621391
import functools import re import tokenize re_assert_equal_end_with_true_or_false = re.compile( r"assertEqual\(.*?, \s+(True|False)\)$") re_assert_equal_start_with_true_or_false = re.compile( r"assertEqual\((True|False),") re_assert_true_instance = re.compile( r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, " r"(\w|\.|\'|\"|\[|\])+\)\)") re_assert_equal_type = re.compile( r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), " r"(\w|\.|\'|\"|\[|\])+\)") re_assert_equal_end_with_none = re.compile(r"assertEqual\(.*?,\s+None\)$") re_assert_equal_start_with_none = re.compile(r"assertEqual\(None,") re_assert_not_equal_end_with_none = re.compile( r"assertNotEqual\(.*?,\s+None\)$") re_assert_not_equal_start_with_none = re.compile(r"assertNotEqual\(None,") re_assert_true_false_with_in_or_not_in = re.compile( r"assert(True|False)\(" r"(\w|[][.'\"])+( not)? in (\w|[][.'\",])+(, .*)?\)") re_assert_true_false_with_in_or_not_in_spaces = re.compile( r"assert(True|False)\((\w|[][.'\"])+( not)? in [\[|'|\"](\w|[][.'\", ])+" r"[\[|'|\"](, .*)?\)") re_assert_equal_in_end_with_true_or_false = re.compile( r"assertEqual\((\w|[][.'\"])+( not)? in (\w|[][.'\", ])+, (True|False)\)") re_assert_equal_in_start_with_true_or_false = re.compile( r"assertEqual\((True|False), (\w|[][.'\"])+( not)? in (\w|[][.'\", ])+\)") re_no_construct_dict = re.compile( r"\sdict\(\)") re_no_construct_list = re.compile( r"\slist\(\)") re_str_format = re.compile(r""" % # start of specifier \(([^)]+)\) # mapping key, in group 1 [#0 +\-]? # optional conversion flag (?:-?\d*)? # optional minimum field width (?:\.\d*)? # optional precision [hLl]? 
# optional length modifier [A-z%] # conversion modifier """, re.X) re_raises = re.compile( r"\s:raise[^s] *.*$|\s:raises *:.*$|\s:raises *[^:]+$") re_db_import = re.compile(r"^from rally.common import db") re_objects_import = re.compile(r"^from rally.common import objects") re_old_type_class = re.compile(r"^\s*class \w+(\(\))?:") re_datetime_alias = re.compile(r"^(from|import) datetime(?!\s+as\s+dt$)") re_log_warn = re.compile(r"(.)*LOG\.(warn)\(\s*('|\"|_)") def skip_ignored_lines(func): @functools.wraps(func) def wrapper(logical_line, physical_line, filename): line = physical_line.strip() if not line or line.startswith("#") or line.endswith("# noqa"): return yield next(func(logical_line, physical_line, filename)) return wrapper def _parse_assert_mock_str(line): point = line.find(".assert_") if point == -1: point = line.find(".called_once_with(") if point != -1: end_pos = line[point:].find("(") + point return point, line[point + 1: end_pos], line[: point] else: return None, None, None @skip_ignored_lines def check_assert_methods_from_mock(logical_line, physical_line, filename): correct_names = ["assert_any_call", "assert_called_once_with", "assert_called_with", "assert_has_calls", "assert_not_called"] ignored_files = ["./tests/unit/test_hacking.py"] if filename.startswith("./tests") and filename not in ignored_files: pos, method_name, obj_name = _parse_assert_mock_str(logical_line) if pos: if method_name not in correct_names: error_number = "N301" msg = ("%(error_number)s:'%(method)s' is not present in `mock`" " library. %(custom_msg)s For more details, visit " "http://www.voidspace.org.uk/python/mock/ .") if method_name == "assert_called": error_number = "N302" custom_msg = ("Maybe, you should try to use " "'assertTrue(%s.called)' instead." 
% obj_name) elif method_name == "assert_called_once": # For more details, see a bug in Rally: # https://bugs.launchpad.net/rally/+bug/1305991 error_number = "N303" custom_msg = ("Maybe, you should try to use " "'assertEqual(1, %s.call_count)' " "or '%s.assert_called_once_with()'" " instead." % (obj_name, obj_name)) elif method_name == "called_once_with": error_number = "N304" custom_msg = ("Maybe, you should try to use " "'%s.assert_called_once_with()'" " instead." % obj_name) else: custom_msg = ("Correct 'assert_*' methods: '%s'." % "', '".join(correct_names)) yield (pos, msg % { "error_number": error_number, "method": method_name, "custom_msg": custom_msg}) @skip_ignored_lines def check_import_of_logging(logical_line, physical_line, filename): excluded_files = ["./rally/common/logging.py", "./tests/unit/test_logging.py", "./tests/ci/rally_verify.py", "./tests/ci/sync_requirements.py"] forbidden_imports = ["from oslo_log", "import oslo_log", "import logging"] if filename not in excluded_files: for forbidden_import in forbidden_imports: if logical_line.startswith(forbidden_import): yield (0, "N310 Wrong module for logging is imported. Please " "use `rally.common.logging` instead.") @skip_ignored_lines def check_import_of_config(logical_line, physical_line, filename): excluded_files = ["./rally/common/cfg.py"] forbidden_imports = ["from oslo_config", "import oslo_config"] if filename not in excluded_files: for forbidden_import in forbidden_imports: if logical_line.startswith(forbidden_import): yield (0, "N311 Wrong module for config is imported. Please " "use `rally.common.cfg` instead.") @skip_ignored_lines def no_use_conf_debug_check(logical_line, physical_line, filename): excluded_files = ["./rally/common/logging.py"] point = logical_line.find("CONF.debug") if point != -1 and filename not in excluded_files: yield(point, "N312 Don't use `CONF.debug`. 
" "Function `rally.common.logging.is_debug` " "should be used instead.") @skip_ignored_lines def assert_true_instance(logical_line, physical_line, filename): if re_assert_true_instance.match(logical_line): yield (0, "N320 assertTrue(isinstance(a, b)) sentences not allowed, " "you should use assertIsInstance(a, b) instead.") @skip_ignored_lines def assert_equal_type(logical_line, physical_line, filename): if re_assert_equal_type.match(logical_line): yield (0, "N321 assertEqual(type(A), B) sentences not allowed, " "you should use assertIsInstance(a, b) instead.") @skip_ignored_lines def assert_equal_none(logical_line, physical_line, filename): res = (re_assert_equal_start_with_none.search(logical_line) or re_assert_equal_end_with_none.search(logical_line)) if res: yield (0, "N322 assertEqual(A, None) or assertEqual(None, A) " "sentences not allowed, you should use assertIsNone(A) " "instead.") @skip_ignored_lines def assert_true_or_false_with_in(logical_line, physical_line, filename): res = (re_assert_true_false_with_in_or_not_in.search(logical_line) or re_assert_true_false_with_in_or_not_in_spaces.search(logical_line)) if res: yield (0, "N323 assertTrue/assertFalse(A in/not in B)sentences not " "allowed, you should use assertIn(A, B) or assertNotIn(A, B)" " instead.") @skip_ignored_lines def assert_equal_in(logical_line, physical_line, filename): res = (re_assert_equal_in_end_with_true_or_false.search(logical_line) or re_assert_equal_in_start_with_true_or_false.search(logical_line)) if res: yield (0, "N324: Use assertIn/NotIn(A, B) rather than " "assertEqual(A in/not in B, True/False) when checking " "collection contents.") @skip_ignored_lines def assert_not_equal_none(logical_line, physical_line, filename): res = (re_assert_not_equal_start_with_none.search(logical_line) or re_assert_not_equal_end_with_none.search(logical_line)) if res: yield (0, "N325 assertNotEqual(A, None) or assertNotEqual(None, A) " "sentences not allowed, you should use assertIsNotNone(A) " 
"instead.") @skip_ignored_lines def assert_equal_true_or_false(logical_line, physical_line, filename): res = (re_assert_equal_end_with_true_or_false.search(logical_line) or re_assert_equal_start_with_true_or_false.search(logical_line)) if res: yield (0, "N326 assertEqual(A, True/False) or " "assertEqual(True/False, A) sentences not allowed," "you should use assertTrue(A) or assertFalse(A) instead.") @skip_ignored_lines def check_no_direct_rally_objects_import(logical_line, physical_line, filename): if filename == "./rally/common/objects/__init__.py": return if filename == "./rally/common/objects/endpoint.py": return if (logical_line.startswith("from rally.common.objects") or logical_line.startswith("import rally.common.objects.")): yield (0, "N340: Import objects module:" "`from rally.common import objects`. " "After that you can use directly objects e.g. objects.Task") @skip_ignored_lines def check_no_oslo_deprecated_import(logical_line, physical_line, filename): if (logical_line.startswith("from oslo.") or logical_line.startswith("import oslo.")): yield (0, "N341: Import oslo module: `from oslo_xyz import ...`. 
" "The oslo.xyz namespace was deprecated, use oslo_xyz " "instead") @skip_ignored_lines def check_quotes(logical_line, physical_line, filename): in_string = False in_multiline_string = False single_quotas_are_used = False check_tripple = ( lambda line, i, char: ( i + 2 < len(line) and (char == line[i] == line[i + 1] == line[i + 2]) ) ) i = 0 while i < len(logical_line): char = logical_line[i] if in_string: if char == "\"": in_string = False if char == "\\": i += 1 elif in_multiline_string: if check_tripple(logical_line, i, "\""): i += 2 # skip next 2 chars in_multiline_string = False elif char == " break elif char == "'": single_quotas_are_used = True break elif char == "\"": if check_tripple(logical_line, i, "\""): in_multiline_string = True i += 3 continue in_string = True i += 1 if single_quotas_are_used: yield (i, "N350 Remove Single quotes") @skip_ignored_lines def check_no_constructor_data_struct(logical_line, physical_line, filename): match = re_no_construct_dict.search(logical_line) if match: yield (0, "N351 Remove dict() construct and use literal {}") match = re_no_construct_list.search(logical_line) if match: yield (0, "N351 Remove list() construct and use literal []") def check_dict_formatting_in_string(logical_line, tokens): # NOTE(stpierre): Can't use @skip_ignored_lines here because it's # a stupid decorator that only works on functions that take # (logical_line, filename) as arguments. if (not logical_line or logical_line.startswith("#") or logical_line.endswith("# noqa")): return current_string = "" in_string = False for token_type, text, start, end, line in tokens: if token_type == tokenize.STRING: if not in_string: current_string = "" in_string = True current_string += text.strip("\"") elif token_type == tokenize.OP: if not current_string: continue # NOTE(stpierre): The string formatting operator % has # lower precedence than +, so we assume that the logical # string has concluded whenever we hit an operator of any # sort. 
(Most operators don't work for strings anyway.) in_string = False if text == "%": format_keys = set() for match in re_str_format.finditer(current_string): format_keys.add(match.group(1)) if len(format_keys) == 1: yield (0, "N353 Do not use mapping key string formatting " "with a single key") if text != ")": # the possibility for a substitution operator like # every other operator does. current_string = "" elif token_type in (tokenize.NL, tokenize.COMMENT): continue else: in_string = False if token_type == tokenize.NEWLINE: current_string = "" @skip_ignored_lines def check_using_unicode(logical_line, physical_line, filename): if re.search(r"\bunicode\(", logical_line): yield (0, "N353 'unicode' function is absent in python3. Please " "use 'six.text_type' instead.") def check_raises(physical_line, filename): ignored_files = ["./tests/unit/test_hacking.py", "./tests/hacking/checks.py"] if filename not in ignored_files: if re_raises.search(physical_line): return (0, "N354 ':Please use ':raises Exception: conditions' " "in docstrings.") @skip_ignored_lines def check_old_type_class(logical_line, physical_line, filename): if re_old_type_class.search(logical_line): yield (0, "N355 This class does not inherit from anything and thus " "will be an old-style class by default. 
Try to inherit from " "``object`` or another new-style class.") @skip_ignored_lines def check_datetime_alias(logical_line, physical_line, filename): if re_datetime_alias.search(logical_line): yield (0, "N356 Please use ``dt`` as alias for ``datetime``.") @skip_ignored_lines def check_no_six_iteritems(logical_line, physical_line, filename): if re.search(r"\six.iteritems\(\)", logical_line): yield (0, "N357 Use dict.items() instead of six.iteritems()") @skip_ignored_lines def check_db_imports_in_cli(logical_line, physical_line, filename): if (not filename.startswith("./rally/cli") or filename == "./rally/cli/commands/db.py"): return if re_db_import.search(logical_line): yield (0, "N360 CLI modules do not allow to work with " "`rally.common.db``.") @skip_ignored_lines def check_objects_imports_in_cli(logical_line, physical_line, filename): if not filename.startswith("./rally/cli"): return if re_objects_import.search(logical_line): yield (0, "N361 CLI modules do not allow to work with " "`rally.common.objects``.") @skip_ignored_lines def check_log_warn(logical_line, physical_line, filename): if re_log_warn.search(logical_line): yield(0, "N313 LOG.warn is deprecated, please use LOG.warning") @skip_ignored_lines def check_opts_import_path(logical_line, physical_line, filename): excluded_files = ["./rally_openstack/__init__.py"] forbidden_methods = [".register_opts("] if filename not in excluded_files: for forbidden_method in forbidden_methods: if logical_line.find(forbidden_method) != -1: yield (0, "N342 All options should be loaded from correct " "paths only - rally_openstack.cfg module.") def factory(register): register(check_assert_methods_from_mock) register(check_import_of_logging) register(check_import_of_config) register(no_use_conf_debug_check) register(assert_true_instance) register(assert_equal_type) register(assert_equal_none) register(assert_true_or_false_with_in) register(assert_equal_in) register(assert_equal_true_or_false) 
register(check_no_direct_rally_objects_import) register(check_no_oslo_deprecated_import) register(check_quotes) register(check_no_constructor_data_struct) register(check_dict_formatting_in_string) register(check_using_unicode) register(check_raises) register(check_datetime_alias) register(check_db_imports_in_cli) register(check_objects_imports_in_cli) register(check_old_type_class) register(check_no_six_iteritems) register(check_log_warn) register(check_opts_import_path)
true
true
f703e067097e7fd54be3a51d8ada504b3532e390
357
py
Python
carafe_layer/setup.py
chensnathan/CARAFE_CUDA
33d3d3af69b24fc679f6a3a071a19070dc46664b
[ "MIT" ]
1
2019-10-28T15:05:30.000Z
2019-10-28T15:05:30.000Z
carafe_layer/setup.py
chensnathan/CARAFE_CUDA
33d3d3af69b24fc679f6a3a071a19070dc46664b
[ "MIT" ]
1
2021-03-25T10:24:19.000Z
2021-06-08T15:07:34.000Z
carafe_layer/setup.py
chensnathan/CARAFE_CUDA
33d3d3af69b24fc679f6a3a071a19070dc46664b
[ "MIT" ]
1
2020-01-03T09:42:22.000Z
2020-01-03T09:42:22.000Z
from setuptools import setup from torch.utils.cpp_extension import BuildExtension, CUDAExtension setup( name='carafe_layer_cuda', ext_modules=[ CUDAExtension('carafe_layer_cuda', [ 'src/carafe_layer_cuda.cpp', 'src/carafe_layer_kernel.cu', ]) ], cmdclass={ 'build_ext': BuildExtension })
23.8
67
0.644258
from setuptools import setup from torch.utils.cpp_extension import BuildExtension, CUDAExtension setup( name='carafe_layer_cuda', ext_modules=[ CUDAExtension('carafe_layer_cuda', [ 'src/carafe_layer_cuda.cpp', 'src/carafe_layer_kernel.cu', ]) ], cmdclass={ 'build_ext': BuildExtension })
true
true
f703e2683a06f78f7c2643f6719d0e4cc5da5092
429
py
Python
src/saffine/detrending_coeff.py
AU-DATALAB/newsFluxus
20522b2c8c830d2377a9620d149a515baaaa9cf4
[ "MIT" ]
3
2020-06-17T07:56:27.000Z
2021-09-30T09:50:50.000Z
src/saffine/detrending_coeff.py
AU-DATALAB/newsFluxus
20522b2c8c830d2377a9620d149a515baaaa9cf4
[ "MIT" ]
null
null
null
src/saffine/detrending_coeff.py
AU-DATALAB/newsFluxus
20522b2c8c830d2377a9620d149a515baaaa9cf4
[ "MIT" ]
2
2021-02-23T11:36:55.000Z
2021-03-04T10:36:19.000Z
from numpy import * import numpy as np # from numba import jit # @jit def detrending_coeff(win_len , order): #win_len = 51 #order = 2 n = (win_len-1)/2 A = mat(ones((win_len,order+1))) x = np.arange(-n , n+1) for j in range(0 , order + 1): A[:,j] = mat(x ** j).T coeff_output = (A.T * A).I * A.T return coeff_output , A # coeff_output,A = detrending_coeff(5,2) # print(coeff_output) # print(A)
18.652174
41
0.599068
from numpy import * import numpy as np def detrending_coeff(win_len , order): n = (win_len-1)/2 A = mat(ones((win_len,order+1))) x = np.arange(-n , n+1) for j in range(0 , order + 1): A[:,j] = mat(x ** j).T coeff_output = (A.T * A).I * A.T return coeff_output , A
true
true
f703e2c70bc3ec943e58a06933c76b53f568ea8b
3,851
py
Python
opentraj/toolkit/loaders/loader_pets.py
RedTachyon/OpenTraj
8277f526d714a4e77d0f9f354259ff5b74e59fd2
[ "MIT" ]
null
null
null
opentraj/toolkit/loaders/loader_pets.py
RedTachyon/OpenTraj
8277f526d714a4e77d0f9f354259ff5b74e59fd2
[ "MIT" ]
null
null
null
opentraj/toolkit/loaders/loader_pets.py
RedTachyon/OpenTraj
8277f526d714a4e77d0f9f354259ff5b74e59fd2
[ "MIT" ]
null
null
null
# Author: Javad Amirian # Email: amiryan.j@gmail.com import xml.etree.ElementTree as et import numpy as np import pandas as pd from opentraj.toolkit.core.trajdataset import TrajDataset from opentraj.toolkit.utils.calibration.camera_calibration_tsai import * def load_pets(path, **kwargs): """ :param path: address of annotation file :param kwargs: :param calib_path: address of calibration file :return: TrajectoryDataset object """ traj_dataset = TrajDataset() annot_xtree = et.parse(path) annot_xroot = annot_xtree.getroot() # dataset cp, cc = None, None # calibration parameters # load calibration calib_path = kwargs.get('calib_path', "") if calib_path: cp = CameraParameters() cc = CalibrationConstants() calib_xtree = et.parse(calib_path) calib_xroot = calib_xtree.getroot() # Camera geometry_node = calib_xroot.find("Geometry") width = int(geometry_node.attrib["width"]) height = int(geometry_node.attrib["height"]) cp.Ncx = float(geometry_node.attrib["ncx"]) cp.Nfx = float(geometry_node.attrib["nfx"]) cp.dx = float(geometry_node.attrib["dx"]) cp.dy = float(geometry_node.attrib["dy"]) cp.dpx = float(geometry_node.attrib["dpx"]) cp.dpy = float(geometry_node.attrib["dpy"]) intrinsic_node = calib_xroot.find("Intrinsic") cc.f = float(intrinsic_node.attrib["focal"]) cc.kappa1 = float(intrinsic_node.attrib["kappa1"]) # 1st order radial distortion cp.Cx = float(intrinsic_node.attrib["cx"]) cp.Cy = float(intrinsic_node.attrib["cy"]) cp.sx = float(intrinsic_node.attrib["sx"]) extrinsic_node = calib_xroot.find("Extrinsic") cc.Tx = float(extrinsic_node.attrib["tx"]) cc.Ty = float(extrinsic_node.attrib["ty"]) cc.Tz = float(extrinsic_node.attrib["tz"]) cc.Rx = float(extrinsic_node.attrib["rx"]) cc.Ry = float(extrinsic_node.attrib["ry"]) cc.Rz = float(extrinsic_node.attrib["rz"]) cc.calc_rr() # Calculate Rotation Matrix loaded_data = [] # frame_id, agent_id, pos_x, pos_y, xc, yc, h, w for frame_node in annot_xroot: objectlist_node = frame_node.find("objectlist") # .text object_nodes = 
objectlist_node.findall("object") frame_id = int(frame_node.attrib.get("number")) for obj_node in object_nodes: agent_id = obj_node.attrib["id"] box_node = obj_node.find("box") xc = float(box_node.attrib["xc"]) yc = float(box_node.attrib["yc"]) h = float(box_node.attrib["h"]) w = float(box_node.attrib["w"]) x_ground = xc y_ground = yc + h/2 if cp: pos_x, pos_y = image_coord_to_world_coord(x_ground, y_ground, 0, cp, cc) else: pos_x, pos_y = np.nan, np.nan loaded_data.append([frame_id, agent_id, pos_x / 1000., pos_y / 1000., xc, yc, h, w]) data_columns = ["frame_id", "agent_id", "pos_x", "pos_y", "xc", "yc", "h", "w"] raw_dataset = pd.DataFrame(np.array(loaded_data), columns=data_columns) traj_dataset.title = kwargs.get('title', "PETS") # copy columns traj_dataset.data[["frame_id", "agent_id", "pos_x", "pos_y"]] = \ raw_dataset[["frame_id", "agent_id", "pos_x", "pos_y"]] traj_dataset.data["scene_id"] = kwargs.get('scene_id', 0) traj_dataset.data["label"] = "pedestrian" # post-process fps = kwargs.get('fps', 7) sampling_rate = kwargs.get('sampling_rate', 1) use_kalman = kwargs.get('use_kalman', False) traj_dataset.postprocess(fps=fps, sampling_rate=sampling_rate, use_kalman=use_kalman) return traj_dataset
34.693694
96
0.622176
import xml.etree.ElementTree as et import numpy as np import pandas as pd from opentraj.toolkit.core.trajdataset import TrajDataset from opentraj.toolkit.utils.calibration.camera_calibration_tsai import * def load_pets(path, **kwargs): traj_dataset = TrajDataset() annot_xtree = et.parse(path) annot_xroot = annot_xtree.getroot() cp, cc = None, None calib_path = kwargs.get('calib_path', "") if calib_path: cp = CameraParameters() cc = CalibrationConstants() calib_xtree = et.parse(calib_path) calib_xroot = calib_xtree.getroot() geometry_node = calib_xroot.find("Geometry") width = int(geometry_node.attrib["width"]) height = int(geometry_node.attrib["height"]) cp.Ncx = float(geometry_node.attrib["ncx"]) cp.Nfx = float(geometry_node.attrib["nfx"]) cp.dx = float(geometry_node.attrib["dx"]) cp.dy = float(geometry_node.attrib["dy"]) cp.dpx = float(geometry_node.attrib["dpx"]) cp.dpy = float(geometry_node.attrib["dpy"]) intrinsic_node = calib_xroot.find("Intrinsic") cc.f = float(intrinsic_node.attrib["focal"]) cc.kappa1 = float(intrinsic_node.attrib["kappa1"]) cp.Cx = float(intrinsic_node.attrib["cx"]) cp.Cy = float(intrinsic_node.attrib["cy"]) cp.sx = float(intrinsic_node.attrib["sx"]) extrinsic_node = calib_xroot.find("Extrinsic") cc.Tx = float(extrinsic_node.attrib["tx"]) cc.Ty = float(extrinsic_node.attrib["ty"]) cc.Tz = float(extrinsic_node.attrib["tz"]) cc.Rx = float(extrinsic_node.attrib["rx"]) cc.Ry = float(extrinsic_node.attrib["ry"]) cc.Rz = float(extrinsic_node.attrib["rz"]) cc.calc_rr() loaded_data = [] for frame_node in annot_xroot: objectlist_node = frame_node.find("objectlist") object_nodes = objectlist_node.findall("object") frame_id = int(frame_node.attrib.get("number")) for obj_node in object_nodes: agent_id = obj_node.attrib["id"] box_node = obj_node.find("box") xc = float(box_node.attrib["xc"]) yc = float(box_node.attrib["yc"]) h = float(box_node.attrib["h"]) w = float(box_node.attrib["w"]) x_ground = xc y_ground = yc + h/2 if cp: pos_x, pos_y = 
image_coord_to_world_coord(x_ground, y_ground, 0, cp, cc) else: pos_x, pos_y = np.nan, np.nan loaded_data.append([frame_id, agent_id, pos_x / 1000., pos_y / 1000., xc, yc, h, w]) data_columns = ["frame_id", "agent_id", "pos_x", "pos_y", "xc", "yc", "h", "w"] raw_dataset = pd.DataFrame(np.array(loaded_data), columns=data_columns) traj_dataset.title = kwargs.get('title', "PETS") traj_dataset.data[["frame_id", "agent_id", "pos_x", "pos_y"]] = \ raw_dataset[["frame_id", "agent_id", "pos_x", "pos_y"]] traj_dataset.data["scene_id"] = kwargs.get('scene_id', 0) traj_dataset.data["label"] = "pedestrian" fps = kwargs.get('fps', 7) sampling_rate = kwargs.get('sampling_rate', 1) use_kalman = kwargs.get('use_kalman', False) traj_dataset.postprocess(fps=fps, sampling_rate=sampling_rate, use_kalman=use_kalman) return traj_dataset
true
true
f703e382437464750f398a5b77d27612a40e8529
7,578
py
Python
config/settings/production.py
danieldourado/estatisticas_facebook_cookiecutter
896903825998f0dee5a6b5d9aa34160f123bd15d
[ "MIT" ]
2
2017-12-22T01:00:22.000Z
2017-12-22T11:14:40.000Z
config/settings/production.py
danieldourado/estatisticas_facebook_cookiecutter
896903825998f0dee5a6b5d9aa34160f123bd15d
[ "MIT" ]
18
2017-12-14T12:04:45.000Z
2022-03-11T23:23:05.000Z
config/settings/production.py
danieldourado/estatisticas_facebook_cookiecutter
896903825998f0dee5a6b5d9aa34160f123bd15d
[ "MIT" ]
1
2021-03-27T16:18:56.000Z
2021-03-27T16:18:56.000Z
""" Production settings for Estatisticas Facebook project. - Use WhiteNoise for serving static files - Use Amazon's S3 for storing uploaded media - Use mailgun to send emails - Use Redis for cache - Use sentry for error logging """ import logging from .base import * # noqa # SECRET CONFIGURATION # ------------------------------------------------------------------------------ # See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key # Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ SECRET_KEY = env('DJANGO_SECRET_KEY') # This ensures that Django will be able to detect a secure connection # properly on Heroku. SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # raven sentry client # See https://docs.sentry.io/clients/python/integrations/django/ INSTALLED_APPS += ['raven.contrib.django.raven_compat', ] # Use Whitenoise to serve static files # See: https://whitenoise.readthedocs.io/ WHITENOISE_MIDDLEWARE = ['whitenoise.middleware.WhiteNoiseMiddleware', ] MIDDLEWARE = WHITENOISE_MIDDLEWARE + MIDDLEWARE RAVEN_MIDDLEWARE = ['raven.contrib.django.raven_compat.middleware.SentryResponseErrorIdMiddleware'] MIDDLEWARE = RAVEN_MIDDLEWARE + MIDDLEWARE # SECURITY CONFIGURATION # ------------------------------------------------------------------------------ # See https://docs.djangoproject.com/en/dev/ref/middleware/#module-django.middleware.security # and https://docs.djangoproject.com/en/dev/howto/deployment/checklist/#run-manage-py-check-deploy # set this to 60 seconds and then to 518400 when you can prove it works SECURE_HSTS_SECONDS = 60 SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool( 'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True) SECURE_CONTENT_TYPE_NOSNIFF = env.bool( 'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True) SECURE_BROWSER_XSS_FILTER = True SESSION_COOKIE_SECURE = True SESSION_COOKIE_HTTPONLY = True SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True) CSRF_COOKIE_SECURE = True 
CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' # SITE CONFIGURATION # ------------------------------------------------------------------------------ # Hosts/domain names that are valid for this site # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['danieldourado.com', ]) # END SITE CONFIGURATION INSTALLED_APPS += ['gunicorn', ] # STORAGE CONFIGURATION # ------------------------------------------------------------------------------ # Uploaded Media Files # ------------------------ # See: http://django-storages.readthedocs.io/en/latest/index.html INSTALLED_APPS += ['storages', ] AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY') AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME') AWS_AUTO_CREATE_BUCKET = True AWS_QUERYSTRING_AUTH = False # AWS cache settings, don't change unless you know what you're doing: AWS_EXPIRY = 60 * 60 * 24 * 7 # TODO See: https://github.com/jschneier/django-storages/issues/47 # Revert the following and use str after the above-mentioned bug is fixed in # either django-storage-redux or boto control = 'max-age=%d, s-maxage=%d, must-revalidate' % (AWS_EXPIRY, AWS_EXPIRY) AWS_HEADERS = { 'Cache-Control': bytes(control, encoding='latin-1') } # URL that handles the media served from MEDIA_ROOT, used for managing # stored files. 
MEDIA_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' # Static Assets # ------------------------ STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' # EMAIL # ------------------------------------------------------------------------------ DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL', default='Estatisticas Facebook <noreply@danieldourado.com>') EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[Estatisticas Facebook]') SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL) # Anymail with Mailgun INSTALLED_APPS += ['anymail', ] ANYMAIL = { 'MAILGUN_API_KEY': env('DJANGO_MAILGUN_API_KEY'), 'MAILGUN_SENDER_DOMAIN': env('MAILGUN_SENDER_DOMAIN') } EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend' # TEMPLATE CONFIGURATION # ------------------------------------------------------------------------------ # See: # https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader TEMPLATES[0]['OPTIONS']['loaders'] = [ ('django.template.loaders.cached.Loader', [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]), ] # DATABASE CONFIGURATION # ------------------------------------------------------------------------------ # Use the Heroku-style specification # Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ DATABASES['default'] = env.db('DATABASE_URL') DATABASES['default']['CONN_MAX_AGE'] = env.int('CONN_MAX_AGE', default=60) # CACHING # ------------------------------------------------------------------------------ REDIS_LOCATION = '{0}/{1}'.format(env('REDIS_URL', default='redis://127.0.0.1:6379'), 0) # Heroku URL does not pass the DB number, so we parse it in CACHES = { 'default': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': REDIS_LOCATION, 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.DefaultClient', 
'IGNORE_EXCEPTIONS': True, # mimics memcache behavior. # http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior } } } # Sentry Configuration SENTRY_DSN = env('DJANGO_SENTRY_DSN') SENTRY_CLIENT = env('DJANGO_SENTRY_CLIENT', default='raven.contrib.django.raven_compat.DjangoClient') LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'root': { 'level': 'WARNING', 'handlers': ['sentry', ], }, 'formatters': { 'verbose': { 'format': '%(levelname)s %(asctime)s %(module)s ' '%(process)d %(thread)d %(message)s' }, }, 'handlers': { 'sentry': { 'level': 'ERROR', 'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler', }, 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'verbose' } }, 'loggers': { 'django.db.backends': { 'level': 'ERROR', 'handlers': ['console', ], 'propagate': False, }, 'raven': { 'level': 'DEBUG', 'handlers': ['console', ], 'propagate': False, }, 'sentry.errors': { 'level': 'DEBUG', 'handlers': ['console', ], 'propagate': False, }, 'django.security.DisallowedHost': { 'level': 'ERROR', 'handlers': ['console', 'sentry', ], 'propagate': False, }, }, } SENTRY_CELERY_LOGLEVEL = env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO) RAVEN_CONFIG = { 'CELERY_LOGLEVEL': env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO), 'DSN': SENTRY_DSN } # Custom Admin URL, use {% url 'admin:index' %} ADMIN_URL = env('DJANGO_ADMIN_URL') # Your production stuff: Below this line define 3rd party library settings # ------------------------------------------------------------------------------
34.921659
117
0.624571
import logging from .base import * SECRET_KEY = env('DJANGO_SECRET_KEY') SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') INSTALLED_APPS += ['raven.contrib.django.raven_compat', ] WHITENOISE_MIDDLEWARE = ['whitenoise.middleware.WhiteNoiseMiddleware', ] MIDDLEWARE = WHITENOISE_MIDDLEWARE + MIDDLEWARE RAVEN_MIDDLEWARE = ['raven.contrib.django.raven_compat.middleware.SentryResponseErrorIdMiddleware'] MIDDLEWARE = RAVEN_MIDDLEWARE + MIDDLEWARE SECURE_HSTS_SECONDS = 60 SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool( 'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True) SECURE_CONTENT_TYPE_NOSNIFF = env.bool( 'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True) SECURE_BROWSER_XSS_FILTER = True SESSION_COOKIE_SECURE = True SESSION_COOKIE_HTTPONLY = True SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True) CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['danieldourado.com', ]) INSTALLED_APPS += ['gunicorn', ] INSTALLED_APPS += ['storages', ] AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY') AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME') AWS_AUTO_CREATE_BUCKET = True AWS_QUERYSTRING_AUTH = False AWS_EXPIRY = 60 * 60 * 24 * 7 control = 'max-age=%d, s-maxage=%d, must-revalidate' % (AWS_EXPIRY, AWS_EXPIRY) AWS_HEADERS = { 'Cache-Control': bytes(control, encoding='latin-1') } MEDIA_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL', default='Estatisticas Facebook <noreply@danieldourado.com>') EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[Estatisticas Facebook]') SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL) INSTALLED_APPS += 
['anymail', ] ANYMAIL = { 'MAILGUN_API_KEY': env('DJANGO_MAILGUN_API_KEY'), 'MAILGUN_SENDER_DOMAIN': env('MAILGUN_SENDER_DOMAIN') } EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend' TEMPLATES[0]['OPTIONS']['loaders'] = [ ('django.template.loaders.cached.Loader', [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]), ] DATABASES['default'] = env.db('DATABASE_URL') DATABASES['default']['CONN_MAX_AGE'] = env.int('CONN_MAX_AGE', default=60) REDIS_LOCATION = '{0}/{1}'.format(env('REDIS_URL', default='redis://127.0.0.1:6379'), 0) CACHES = { 'default': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': REDIS_LOCATION, 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.DefaultClient', 'IGNORE_EXCEPTIONS': True, } } } SENTRY_DSN = env('DJANGO_SENTRY_DSN') SENTRY_CLIENT = env('DJANGO_SENTRY_CLIENT', default='raven.contrib.django.raven_compat.DjangoClient') LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'root': { 'level': 'WARNING', 'handlers': ['sentry', ], }, 'formatters': { 'verbose': { 'format': '%(levelname)s %(asctime)s %(module)s ' '%(process)d %(thread)d %(message)s' }, }, 'handlers': { 'sentry': { 'level': 'ERROR', 'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler', }, 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'verbose' } }, 'loggers': { 'django.db.backends': { 'level': 'ERROR', 'handlers': ['console', ], 'propagate': False, }, 'raven': { 'level': 'DEBUG', 'handlers': ['console', ], 'propagate': False, }, 'sentry.errors': { 'level': 'DEBUG', 'handlers': ['console', ], 'propagate': False, }, 'django.security.DisallowedHost': { 'level': 'ERROR', 'handlers': ['console', 'sentry', ], 'propagate': False, }, }, } SENTRY_CELERY_LOGLEVEL = env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO) RAVEN_CONFIG = { 'CELERY_LOGLEVEL': env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO), 'DSN': SENTRY_DSN } ADMIN_URL = env('DJANGO_ADMIN_URL')
true
true
f703e3a077b4e41b0e27387b9ed3ca54503418ad
18,606
py
Python
pyiron/vasp/potential.py
t-brink/pyiron
c07552b54a39e3f036ba395325cd4b372af0f794
[ "BSD-3-Clause" ]
null
null
null
pyiron/vasp/potential.py
t-brink/pyiron
c07552b54a39e3f036ba395325cd4b372af0f794
[ "BSD-3-Clause" ]
1
2021-11-02T09:22:56.000Z
2021-11-02T09:22:56.000Z
pyiron/vasp/potential.py
t-brink/pyiron
c07552b54a39e3f036ba395325cd4b372af0f794
[ "BSD-3-Clause" ]
1
2021-11-02T08:35:47.000Z
2021-11-02T08:35:47.000Z
# coding: utf-8 # Copyright (c) Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department # Distributed under the terms of "New BSD License", see the LICENSE file. import os import posixpath import numpy as np import pandas import tables import warnings from pyiron_base import GenericParameters, Settings from pyiron.atomistics.job.potentials import PotentialAbstract, find_potential_file_base __author__ = "Jan Janssen" __copyright__ = ( "Copyright 2020, Max-Planck-Institut für Eisenforschung GmbH - " "Computational Materials Design (CM) Department" ) __version__ = "1.0" __maintainer__ = "Jan Janssen" __email__ = "janssen@mpie.de" __status__ = "development" __date__ = "Sep 1, 2017" s = Settings() class VaspPotentialAbstract(PotentialAbstract): """ Args: potential_df: default_df: selected_atoms: """ def __init__(self, potential_df=None, default_df=None, selected_atoms=None): if potential_df is None: potential_df = self._get_potential_df( plugin_name="vasp", file_name_lst={"potentials_vasp.csv"}, backward_compatibility_name="vasppotentials", ) super(VaspPotentialAbstract, self).__init__( potential_df=potential_df, default_df=default_df, selected_atoms=selected_atoms, ) def default(self): if self._default_df is not None: return pandas.concat( [ self._potential_df[ ( self._potential_df["Name"] == self._default_df.loc[atom].values[0] ) ] for atom in self._selected_atoms ] ) return None def find_default(self, element): if isinstance(element, set): element = element elif isinstance(element, list): element = set(element) elif isinstance(element, str): element = set([element]) else: raise TypeError("Only, str, list and set supported!") element_lst = list(element) if self._default_df is not None: merged_lst = list(set(self._selected_atoms + element_lst)) return pandas.concat( [ self._potential_df[ ( self._potential_df["Name"] == self._default_df.loc[atom].values[0] ) ] for atom in merged_lst ] ) return None def find(self, element): if 
isinstance(element, set): element = element elif isinstance(element, list): element = set(element) elif isinstance(element, str): element = set([element]) else: raise TypeError("Only, str, list and set supported!") element_lst = list(element) merged_lst = list(set(self._selected_atoms + element_lst)) return pandas.concat( [super(VaspPotentialAbstract, self).find({atom}) for atom in merged_lst] ) def list(self): if len(self._selected_atoms) != 0: return pandas.concat( [ super(VaspPotentialAbstract, self).find({atom}) for atom in self._selected_atoms ] ) else: return pandas.DataFrame({}) def list_potential_names(self): df = self.list() if len(df) != 0: return list(self.list()["Name"]) else: return [] @staticmethod def _return_potential_file(file_name): for resource_path in s.resource_paths: resource_path_potcar = os.path.join( resource_path, "vasp", "potentials", file_name ) if os.path.exists(resource_path_potcar): return resource_path_potcar return None def __dir__(self): return [val.replace("-", "_") for val in self.list_potential_names()] def __getitem__(self, item): item_replace = item.replace("_gga_pbe", "-gga-pbe").replace("_lda", "-lda") if item_replace in self.list_potential_names(): df = self.list() return self._return_potential_file( file_name=list(df[df["Name"] == item_replace]["Filename"])[0][0] ) selected_atoms = self._selected_atoms + [item] return VaspPotentialAbstract( potential_df=self._potential_df, default_df=self._default_df, selected_atoms=selected_atoms, ) class VaspPotentialFile(VaspPotentialAbstract): """ The Potential class is derived from the PotentialAbstract class, but instead of loading the potentials from a list, the potentials are loaded from a file. 
Args: xc (str): Exchange correlation functional ['PBE', 'LDA'] """ def __init__(self, xc=None, selected_atoms=None): potential_df = self._get_potential_df( plugin_name="vasp", file_name_lst={"potentials_vasp.csv"}, backward_compatibility_name="vasppotentials", ) if xc == "PBE": default_df = self._get_potential_default_df( plugin_name="vasp", file_name_lst={"potentials_vasp_pbe_default.csv"}, backward_compatibility_name="defaultvasppbe", ) potential_df = potential_df[(potential_df["Model"] == "gga-pbe")] elif xc == "GGA": default_df = self._get_potential_default_df( plugin_name="vasp", file_name_lst={"potentials_vasp_pbe_default.csv"}, backward_compatibility_name="defaultvasppbe", ) potential_df = potential_df[(potential_df["Model"] == "gga-pbe")] elif xc == "LDA": default_df = self._get_potential_default_df( plugin_name="vasp", file_name_lst={"potentials_vasp_lda_default.csv"}, backward_compatibility_name="defaultvasplda", ) potential_df = potential_df[(potential_df["Model"] == "lda")] else: raise ValueError( 'The exchange correlation functional has to be set and it can either be "LDA" or "PBE"' ) super(VaspPotentialFile, self).__init__( potential_df=potential_df, default_df=default_df, selected_atoms=selected_atoms, ) def add_new_element(self, parent_element, new_element): """ Adding a new user defined element with a different POTCAR file. 
It is assumed that the file exists Args: parent_element (str): Parent element new_element (str): Name of the new element (the name of the folder where the new POTCAR file exists """ ds = self.find_default(element=parent_element) ds["Species"].values[0][0] = new_element path_list = ds["Filename"].values[0][0].split("/") path_list[-2] = new_element name_list = ds["Name"].values[0].split("-") name_list[0] = new_element ds["Name"].values[0] = "-".join(name_list) ds["Filename"].values[0][0] = "/".join(path_list) self._potential_df = self._potential_df.append(ds) if new_element not in self._default_df.index.values: ds = pandas.Series() ds.name = new_element ds["Name"] = "-".join(name_list) self._default_df = self._default_df.append(ds) else: self._default_df.loc[new_element] = "-".join(name_list) class VaspPotential(object): """ The Potential class is derived from the PotentialAbstract class, but instead of loading the potentials from a list, the potentials are loaded from a file. Args: path (str): path to the potential list """ def __init__(self, selected_atoms=None): self.pbe = VaspPotentialFile(xc="PBE", selected_atoms=selected_atoms) self.lda = VaspPotentialFile(xc="LDA", selected_atoms=selected_atoms) class VaspPotentialSetter(object): def __init__(self, element_lst): super(VaspPotentialSetter, self).__setattr__("_element_lst", element_lst) super(VaspPotentialSetter, self).__setattr__( "_potential_dict", {el: None for el in element_lst} ) def __getattr__(self, item): if item in self._element_lst: return item else: raise AttributeError def __setitem__(self, key, value): self.__setattr__(key=key, value=value) def __setattr__(self, key, value): if key in self._element_lst: self._potential_dict[key] = value else: raise AttributeError def to_dict(self): return self._potential_dict def __repr__(self): return self._potential_dict.__repr__() def find_potential_file(path): return find_potential_file_base( path=path, resource_path_lst=s.resource_paths, 
rel_path=os.path.join("vasp", "potentials") ) def get_enmax_among_species(symbol_lst, return_list=False, xc="PBE"): """ DEPRECATED: Please use `get_enmax_among_potentials`. Given a list of species symbols, finds the largest applicable encut. Args: symbol_lst (list): The list of species symbols. return_list (bool): Whether to return the list of all ENMAX values (in the same order as `species_lst` along with the largest value). (Default is False.) xc ("GGA"/"PBE"/"LDA"): The exchange correlation functional for which the POTCARs were generated. (Default is "PBE".) Returns: (float): The largest ENMAX among the POTCAR files for all the species. [optional](list): The ENMAX value corresponding to each species. """ warnings.warn(("get_enmax_among_species is deprecated as of v0.3.0. Please use get_enmax_among_potentials and note " + "the adjustment to the signature (*args instead of list)"), DeprecationWarning) return get_enmax_among_potentials(*symbol_lst, return_list=return_list, xc=xc) def get_enmax_among_potentials(*names, return_list=False, xc="PBE"): """ Given potential names without XC information or elemental symbols, look over all the corresponding POTCAR files and find the largest ENMAX value. e.g. `get_enmax_among_potentials('Mg', 'Al_GW', 'Ca_pv', 'Ca_sv', xc='LDA')` Args: *names (str): Names of potentials or elemental symbols return_list (bool): Whether to return the list of all ENMAX values (in the same order as `names` as a second return value after providing the largest value). (Default is False.) xc ("GGA"/"PBE"/"LDA"): The exchange correlation functional for which the POTCARs were generated. (Default is "PBE".) Returns: (float): The largest ENMAX among the POTCAR files for all the requested names. [optional](list): The ENMAX value corresponding to each species. 
""" def _get_just_element_from_name(name): return name.split('_')[0] def _get_index_of_exact_match(name, potential_names): try: return np.argwhere([name == strip_xc_from_potential_name(pn) for pn in potential_names])[0, 0] except IndexError: raise ValueError("Couldn't find {} among potential names for {}".format(name, _get_just_element_from_name(name))) def _get_potcar_filename(name, exch_corr): potcar_table = VaspPotentialFile(xc=exch_corr).find(_get_just_element_from_name(name)) return potcar_table['Filename'].values[ _get_index_of_exact_match(name, potcar_table['Name'].values) ][0] enmax_lst = [] for n in names: with open(find_potential_file(path=_get_potcar_filename(n, xc))) as pf: for i, line in enumerate(pf): if i == 14: encut_str = line.split()[2][:-1] enmax_lst.append(float(encut_str)) break if return_list: return max(enmax_lst), enmax_lst else: return max(enmax_lst) def strip_xc_from_potential_name(name): return name.split('-')[0] class Potcar(GenericParameters): pot_path_dict = {"GGA": "paw-gga-pbe", "PBE": "paw-gga-pbe", "LDA": "paw-lda"} def __init__(self, input_file_name=None, table_name="potcar"): GenericParameters.__init__( self, input_file_name=input_file_name, table_name=table_name, val_only=False, comment_char="#", ) self._structure = None self.electrons_per_atom_lst = list() self.max_cutoff_lst = list() self.el_path_lst = list() self.el_path_dict = dict() self.modified_elements = dict() def potcar_set_structure(self, structure, modified_elements): self._structure = structure self._set_default_path_dict() self._set_potential_paths() self.modified_elements = modified_elements def modify(self, **modify): if "xc" in modify: xc_type = modify["xc"] self._set_default_path_dict() if xc_type not in self.pot_path_dict: raise ValueError("xc type not implemented: " + xc_type) GenericParameters.modify(self, **modify) if self._structure is not None: self._set_potential_paths() def _set_default_path_dict(self): if self._structure is None: return vasp_potentials 
= VaspPotentialFile(xc=self.get("xc")) for i, el_obj in enumerate(self._structure.get_species_objects()): if isinstance(el_obj.Parent, str): el = el_obj.Parent else: el = el_obj.Abbreviation if isinstance(el_obj.tags, dict): if "pseudo_potcar_file" in el_obj.tags.keys(): new_element = el_obj.tags["pseudo_potcar_file"] vasp_potentials.add_new_element( parent_element=el, new_element=new_element ) key = vasp_potentials.find_default(el).Species.values[0][0] val = vasp_potentials.find_default(el).Name.values[0] self[key] = val def _set_potential_paths(self): element_list = ( self._structure.get_species_symbols() ) # .ElementList.getSpecies() object_list = self._structure.get_species_objects() s.logger.debug("element list: {0}".format(element_list)) self.el_path_lst = list() try: xc = self.get("xc") except tables.exceptions.NoSuchNodeError: xc = self.get("xc") s.logger.debug("XC: {0}".format(xc)) vasp_potentials = VaspPotentialFile(xc=xc) for i, el_obj in enumerate(object_list): if isinstance(el_obj.Parent, str): el = el_obj.Parent else: el = el_obj.Abbreviation if ( isinstance(el_obj.tags, dict) and "pseudo_potcar_file" in el_obj.tags.keys() ): new_element = el_obj.tags["pseudo_potcar_file"] vasp_potentials.add_new_element( parent_element=el, new_element=new_element ) el_path = find_potential_file( path=vasp_potentials.find_default(new_element)["Filename"].values[ 0 ][0] ) if not (os.path.isfile(el_path)): raise ValueError("such a file does not exist in the pp directory") elif el in self.modified_elements.keys(): new_element = self.modified_elements[el] if os.path.isabs(new_element): el_path = new_element else: vasp_potentials.add_new_element( parent_element=el, new_element=new_element ) el_path = find_potential_file( path=vasp_potentials.find_default(new_element)["Filename"].values[ 0 ][0] ) else: el_path = find_potential_file( path=vasp_potentials.find_default(el)["Filename"].values[0][0] ) if not (os.path.isfile(el_path)): raise AssertionError() pot_name = "pot_" + 
str(i) if pot_name in self._dataset["Parameter"]: try: ind = self._dataset["Parameter"].index(pot_name) except (ValueError, IndexError): indices = np.core.defchararray.find( self._dataset["Parameter"], pot_name ) ind = np.where(indices == 0)[0][0] self._dataset["Value"][ind] = el_path self._dataset["Comment"][ind] = "" else: self._dataset["Parameter"].append("pot_" + str(i)) self._dataset["Value"].append(el_path) self._dataset["Comment"].append("") self.el_path_lst.append(el_path) def write_file(self, file_name, cwd=None): """ Args: file_name: cwd: Returns: """ self.electrons_per_atom_lst = list() self.max_cutoff_lst = list() self._set_potential_paths() if cwd is not None: file_name = posixpath.join(cwd, file_name) f = open(file_name, "w") for el_file in self.el_path_lst: with open(el_file) as pot_file: for i, line in enumerate(pot_file): f.write(line) if i == 1: self.electrons_per_atom_lst.append(int(float(line))) elif i == 14: mystr = line.split()[2][:-1] self.max_cutoff_lst.append(float(mystr)) f.close() def load_default(self): file_content = """\ xc GGA # LDA, GGA """ self.load_string(file_content)
37.063745
125
0.581587
import os import posixpath import numpy as np import pandas import tables import warnings from pyiron_base import GenericParameters, Settings from pyiron.atomistics.job.potentials import PotentialAbstract, find_potential_file_base __author__ = "Jan Janssen" __copyright__ = ( "Copyright 2020, Max-Planck-Institut für Eisenforschung GmbH - " "Computational Materials Design (CM) Department" ) __version__ = "1.0" __maintainer__ = "Jan Janssen" __email__ = "janssen@mpie.de" __status__ = "development" __date__ = "Sep 1, 2017" s = Settings() class VaspPotentialAbstract(PotentialAbstract): def __init__(self, potential_df=None, default_df=None, selected_atoms=None): if potential_df is None: potential_df = self._get_potential_df( plugin_name="vasp", file_name_lst={"potentials_vasp.csv"}, backward_compatibility_name="vasppotentials", ) super(VaspPotentialAbstract, self).__init__( potential_df=potential_df, default_df=default_df, selected_atoms=selected_atoms, ) def default(self): if self._default_df is not None: return pandas.concat( [ self._potential_df[ ( self._potential_df["Name"] == self._default_df.loc[atom].values[0] ) ] for atom in self._selected_atoms ] ) return None def find_default(self, element): if isinstance(element, set): element = element elif isinstance(element, list): element = set(element) elif isinstance(element, str): element = set([element]) else: raise TypeError("Only, str, list and set supported!") element_lst = list(element) if self._default_df is not None: merged_lst = list(set(self._selected_atoms + element_lst)) return pandas.concat( [ self._potential_df[ ( self._potential_df["Name"] == self._default_df.loc[atom].values[0] ) ] for atom in merged_lst ] ) return None def find(self, element): if isinstance(element, set): element = element elif isinstance(element, list): element = set(element) elif isinstance(element, str): element = set([element]) else: raise TypeError("Only, str, list and set supported!") element_lst = list(element) merged_lst = 
list(set(self._selected_atoms + element_lst)) return pandas.concat( [super(VaspPotentialAbstract, self).find({atom}) for atom in merged_lst] ) def list(self): if len(self._selected_atoms) != 0: return pandas.concat( [ super(VaspPotentialAbstract, self).find({atom}) for atom in self._selected_atoms ] ) else: return pandas.DataFrame({}) def list_potential_names(self): df = self.list() if len(df) != 0: return list(self.list()["Name"]) else: return [] @staticmethod def _return_potential_file(file_name): for resource_path in s.resource_paths: resource_path_potcar = os.path.join( resource_path, "vasp", "potentials", file_name ) if os.path.exists(resource_path_potcar): return resource_path_potcar return None def __dir__(self): return [val.replace("-", "_") for val in self.list_potential_names()] def __getitem__(self, item): item_replace = item.replace("_gga_pbe", "-gga-pbe").replace("_lda", "-lda") if item_replace in self.list_potential_names(): df = self.list() return self._return_potential_file( file_name=list(df[df["Name"] == item_replace]["Filename"])[0][0] ) selected_atoms = self._selected_atoms + [item] return VaspPotentialAbstract( potential_df=self._potential_df, default_df=self._default_df, selected_atoms=selected_atoms, ) class VaspPotentialFile(VaspPotentialAbstract): def __init__(self, xc=None, selected_atoms=None): potential_df = self._get_potential_df( plugin_name="vasp", file_name_lst={"potentials_vasp.csv"}, backward_compatibility_name="vasppotentials", ) if xc == "PBE": default_df = self._get_potential_default_df( plugin_name="vasp", file_name_lst={"potentials_vasp_pbe_default.csv"}, backward_compatibility_name="defaultvasppbe", ) potential_df = potential_df[(potential_df["Model"] == "gga-pbe")] elif xc == "GGA": default_df = self._get_potential_default_df( plugin_name="vasp", file_name_lst={"potentials_vasp_pbe_default.csv"}, backward_compatibility_name="defaultvasppbe", ) potential_df = potential_df[(potential_df["Model"] == "gga-pbe")] elif xc == 
"LDA": default_df = self._get_potential_default_df( plugin_name="vasp", file_name_lst={"potentials_vasp_lda_default.csv"}, backward_compatibility_name="defaultvasplda", ) potential_df = potential_df[(potential_df["Model"] == "lda")] else: raise ValueError( 'The exchange correlation functional has to be set and it can either be "LDA" or "PBE"' ) super(VaspPotentialFile, self).__init__( potential_df=potential_df, default_df=default_df, selected_atoms=selected_atoms, ) def add_new_element(self, parent_element, new_element): ds = self.find_default(element=parent_element) ds["Species"].values[0][0] = new_element path_list = ds["Filename"].values[0][0].split("/") path_list[-2] = new_element name_list = ds["Name"].values[0].split("-") name_list[0] = new_element ds["Name"].values[0] = "-".join(name_list) ds["Filename"].values[0][0] = "/".join(path_list) self._potential_df = self._potential_df.append(ds) if new_element not in self._default_df.index.values: ds = pandas.Series() ds.name = new_element ds["Name"] = "-".join(name_list) self._default_df = self._default_df.append(ds) else: self._default_df.loc[new_element] = "-".join(name_list) class VaspPotential(object): def __init__(self, selected_atoms=None): self.pbe = VaspPotentialFile(xc="PBE", selected_atoms=selected_atoms) self.lda = VaspPotentialFile(xc="LDA", selected_atoms=selected_atoms) class VaspPotentialSetter(object): def __init__(self, element_lst): super(VaspPotentialSetter, self).__setattr__("_element_lst", element_lst) super(VaspPotentialSetter, self).__setattr__( "_potential_dict", {el: None for el in element_lst} ) def __getattr__(self, item): if item in self._element_lst: return item else: raise AttributeError def __setitem__(self, key, value): self.__setattr__(key=key, value=value) def __setattr__(self, key, value): if key in self._element_lst: self._potential_dict[key] = value else: raise AttributeError def to_dict(self): return self._potential_dict def __repr__(self): return 
self._potential_dict.__repr__() def find_potential_file(path): return find_potential_file_base( path=path, resource_path_lst=s.resource_paths, rel_path=os.path.join("vasp", "potentials") ) def get_enmax_among_species(symbol_lst, return_list=False, xc="PBE"): warnings.warn(("get_enmax_among_species is deprecated as of v0.3.0. Please use get_enmax_among_potentials and note " + "the adjustment to the signature (*args instead of list)"), DeprecationWarning) return get_enmax_among_potentials(*symbol_lst, return_list=return_list, xc=xc) def get_enmax_among_potentials(*names, return_list=False, xc="PBE"): def _get_just_element_from_name(name): return name.split('_')[0] def _get_index_of_exact_match(name, potential_names): try: return np.argwhere([name == strip_xc_from_potential_name(pn) for pn in potential_names])[0, 0] except IndexError: raise ValueError("Couldn't find {} among potential names for {}".format(name, _get_just_element_from_name(name))) def _get_potcar_filename(name, exch_corr): potcar_table = VaspPotentialFile(xc=exch_corr).find(_get_just_element_from_name(name)) return potcar_table['Filename'].values[ _get_index_of_exact_match(name, potcar_table['Name'].values) ][0] enmax_lst = [] for n in names: with open(find_potential_file(path=_get_potcar_filename(n, xc))) as pf: for i, line in enumerate(pf): if i == 14: encut_str = line.split()[2][:-1] enmax_lst.append(float(encut_str)) break if return_list: return max(enmax_lst), enmax_lst else: return max(enmax_lst) def strip_xc_from_potential_name(name): return name.split('-')[0] class Potcar(GenericParameters): pot_path_dict = {"GGA": "paw-gga-pbe", "PBE": "paw-gga-pbe", "LDA": "paw-lda"} def __init__(self, input_file_name=None, table_name="potcar"): GenericParameters.__init__( self, input_file_name=input_file_name, table_name=table_name, val_only=False, comment_char="#", ) self._structure = None self.electrons_per_atom_lst = list() self.max_cutoff_lst = list() self.el_path_lst = list() self.el_path_dict = dict() 
self.modified_elements = dict() def potcar_set_structure(self, structure, modified_elements): self._structure = structure self._set_default_path_dict() self._set_potential_paths() self.modified_elements = modified_elements def modify(self, **modify): if "xc" in modify: xc_type = modify["xc"] self._set_default_path_dict() if xc_type not in self.pot_path_dict: raise ValueError("xc type not implemented: " + xc_type) GenericParameters.modify(self, **modify) if self._structure is not None: self._set_potential_paths() def _set_default_path_dict(self): if self._structure is None: return vasp_potentials = VaspPotentialFile(xc=self.get("xc")) for i, el_obj in enumerate(self._structure.get_species_objects()): if isinstance(el_obj.Parent, str): el = el_obj.Parent else: el = el_obj.Abbreviation if isinstance(el_obj.tags, dict): if "pseudo_potcar_file" in el_obj.tags.keys(): new_element = el_obj.tags["pseudo_potcar_file"] vasp_potentials.add_new_element( parent_element=el, new_element=new_element ) key = vasp_potentials.find_default(el).Species.values[0][0] val = vasp_potentials.find_default(el).Name.values[0] self[key] = val def _set_potential_paths(self): element_list = ( self._structure.get_species_symbols() ) # .ElementList.getSpecies() object_list = self._structure.get_species_objects() s.logger.debug("element list: {0}".format(element_list)) self.el_path_lst = list() try: xc = self.get("xc") except tables.exceptions.NoSuchNodeError: xc = self.get("xc") s.logger.debug("XC: {0}".format(xc)) vasp_potentials = VaspPotentialFile(xc=xc) for i, el_obj in enumerate(object_list): if isinstance(el_obj.Parent, str): el = el_obj.Parent else: el = el_obj.Abbreviation if ( isinstance(el_obj.tags, dict) and "pseudo_potcar_file" in el_obj.tags.keys() ): new_element = el_obj.tags["pseudo_potcar_file"] vasp_potentials.add_new_element( parent_element=el, new_element=new_element ) el_path = find_potential_file( path=vasp_potentials.find_default(new_element)["Filename"].values[ 0 ][0] ) if 
not (os.path.isfile(el_path)): raise ValueError("such a file does not exist in the pp directory") elif el in self.modified_elements.keys(): new_element = self.modified_elements[el] if os.path.isabs(new_element): el_path = new_element else: vasp_potentials.add_new_element( parent_element=el, new_element=new_element ) el_path = find_potential_file( path=vasp_potentials.find_default(new_element)["Filename"].values[ 0 ][0] ) else: el_path = find_potential_file( path=vasp_potentials.find_default(el)["Filename"].values[0][0] ) if not (os.path.isfile(el_path)): raise AssertionError() pot_name = "pot_" + str(i) if pot_name in self._dataset["Parameter"]: try: ind = self._dataset["Parameter"].index(pot_name) except (ValueError, IndexError): indices = np.core.defchararray.find( self._dataset["Parameter"], pot_name ) ind = np.where(indices == 0)[0][0] self._dataset["Value"][ind] = el_path self._dataset["Comment"][ind] = "" else: self._dataset["Parameter"].append("pot_" + str(i)) self._dataset["Value"].append(el_path) self._dataset["Comment"].append("") self.el_path_lst.append(el_path) def write_file(self, file_name, cwd=None): self.electrons_per_atom_lst = list() self.max_cutoff_lst = list() self._set_potential_paths() if cwd is not None: file_name = posixpath.join(cwd, file_name) f = open(file_name, "w") for el_file in self.el_path_lst: with open(el_file) as pot_file: for i, line in enumerate(pot_file): f.write(line) if i == 1: self.electrons_per_atom_lst.append(int(float(line))) elif i == 14: mystr = line.split()[2][:-1] self.max_cutoff_lst.append(float(mystr)) f.close() def load_default(self): file_content = """\ xc GGA # LDA, GGA """ self.load_string(file_content)
true
true
f703e4a7028f63373e16c31d94c3f103f420f77b
18,454
py
Python
kubernetes/client/models/v1beta1_event.py
iguazio/python
c2684bb479d44a49a2010ec4ede5ffa7b17349dd
[ "Apache-2.0" ]
null
null
null
kubernetes/client/models/v1beta1_event.py
iguazio/python
c2684bb479d44a49a2010ec4ede5ffa7b17349dd
[ "Apache-2.0" ]
null
null
null
kubernetes/client/models/v1beta1_event.py
iguazio/python
c2684bb479d44a49a2010ec4ede5ffa7b17349dd
[ "Apache-2.0" ]
1
2019-01-10T11:13:52.000Z
2019-01-10T11:13:52.000Z
# coding: utf-8 """ Kubernetes No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) OpenAPI spec version: v1.13.1 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class V1beta1Event(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'action': 'str', 'api_version': 'str', 'deprecated_count': 'int', 'deprecated_first_timestamp': 'datetime', 'deprecated_last_timestamp': 'datetime', 'deprecated_source': 'V1EventSource', 'event_time': 'datetime', 'kind': 'str', 'metadata': 'V1ObjectMeta', 'note': 'str', 'reason': 'str', 'regarding': 'V1ObjectReference', 'related': 'V1ObjectReference', 'reporting_controller': 'str', 'reporting_instance': 'str', 'series': 'V1beta1EventSeries', 'type': 'str' } attribute_map = { 'action': 'action', 'api_version': 'apiVersion', 'deprecated_count': 'deprecatedCount', 'deprecated_first_timestamp': 'deprecatedFirstTimestamp', 'deprecated_last_timestamp': 'deprecatedLastTimestamp', 'deprecated_source': 'deprecatedSource', 'event_time': 'eventTime', 'kind': 'kind', 'metadata': 'metadata', 'note': 'note', 'reason': 'reason', 'regarding': 'regarding', 'related': 'related', 'reporting_controller': 'reportingController', 'reporting_instance': 'reportingInstance', 'series': 'series', 'type': 'type' } def __init__(self, action=None, api_version=None, deprecated_count=None, deprecated_first_timestamp=None, deprecated_last_timestamp=None, deprecated_source=None, event_time=None, kind=None, metadata=None, note=None, reason=None, regarding=None, related=None, reporting_controller=None, reporting_instance=None, series=None, type=None): """ V1beta1Event 
- a model defined in Swagger """ self._action = None self._api_version = None self._deprecated_count = None self._deprecated_first_timestamp = None self._deprecated_last_timestamp = None self._deprecated_source = None self._event_time = None self._kind = None self._metadata = None self._note = None self._reason = None self._regarding = None self._related = None self._reporting_controller = None self._reporting_instance = None self._series = None self._type = None self.discriminator = None if action is not None: self.action = action if api_version is not None: self.api_version = api_version if deprecated_count is not None: self.deprecated_count = deprecated_count if deprecated_first_timestamp is not None: self.deprecated_first_timestamp = deprecated_first_timestamp if deprecated_last_timestamp is not None: self.deprecated_last_timestamp = deprecated_last_timestamp if deprecated_source is not None: self.deprecated_source = deprecated_source self.event_time = event_time if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata if note is not None: self.note = note if reason is not None: self.reason = reason if regarding is not None: self.regarding = regarding if related is not None: self.related = related if reporting_controller is not None: self.reporting_controller = reporting_controller if reporting_instance is not None: self.reporting_instance = reporting_instance if series is not None: self.series = series if type is not None: self.type = type @property def action(self): """ Gets the action of this V1beta1Event. What action was taken/failed regarding to the regarding object. :return: The action of this V1beta1Event. :rtype: str """ return self._action @action.setter def action(self, action): """ Sets the action of this V1beta1Event. What action was taken/failed regarding to the regarding object. :param action: The action of this V1beta1Event. 
:type: str """ self._action = action @property def api_version(self): """ Gets the api_version of this V1beta1Event. APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources :return: The api_version of this V1beta1Event. :rtype: str """ return self._api_version @api_version.setter def api_version(self, api_version): """ Sets the api_version of this V1beta1Event. APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources :param api_version: The api_version of this V1beta1Event. :type: str """ self._api_version = api_version @property def deprecated_count(self): """ Gets the deprecated_count of this V1beta1Event. Deprecated field assuring backward compatibility with core.v1 Event type :return: The deprecated_count of this V1beta1Event. :rtype: int """ return self._deprecated_count @deprecated_count.setter def deprecated_count(self, deprecated_count): """ Sets the deprecated_count of this V1beta1Event. Deprecated field assuring backward compatibility with core.v1 Event type :param deprecated_count: The deprecated_count of this V1beta1Event. :type: int """ self._deprecated_count = deprecated_count @property def deprecated_first_timestamp(self): """ Gets the deprecated_first_timestamp of this V1beta1Event. Deprecated field assuring backward compatibility with core.v1 Event type :return: The deprecated_first_timestamp of this V1beta1Event. 
:rtype: datetime """ return self._deprecated_first_timestamp @deprecated_first_timestamp.setter def deprecated_first_timestamp(self, deprecated_first_timestamp): """ Sets the deprecated_first_timestamp of this V1beta1Event. Deprecated field assuring backward compatibility with core.v1 Event type :param deprecated_first_timestamp: The deprecated_first_timestamp of this V1beta1Event. :type: datetime """ self._deprecated_first_timestamp = deprecated_first_timestamp @property def deprecated_last_timestamp(self): """ Gets the deprecated_last_timestamp of this V1beta1Event. Deprecated field assuring backward compatibility with core.v1 Event type :return: The deprecated_last_timestamp of this V1beta1Event. :rtype: datetime """ return self._deprecated_last_timestamp @deprecated_last_timestamp.setter def deprecated_last_timestamp(self, deprecated_last_timestamp): """ Sets the deprecated_last_timestamp of this V1beta1Event. Deprecated field assuring backward compatibility with core.v1 Event type :param deprecated_last_timestamp: The deprecated_last_timestamp of this V1beta1Event. :type: datetime """ self._deprecated_last_timestamp = deprecated_last_timestamp @property def deprecated_source(self): """ Gets the deprecated_source of this V1beta1Event. Deprecated field assuring backward compatibility with core.v1 Event type :return: The deprecated_source of this V1beta1Event. :rtype: V1EventSource """ return self._deprecated_source @deprecated_source.setter def deprecated_source(self, deprecated_source): """ Sets the deprecated_source of this V1beta1Event. Deprecated field assuring backward compatibility with core.v1 Event type :param deprecated_source: The deprecated_source of this V1beta1Event. :type: V1EventSource """ self._deprecated_source = deprecated_source @property def event_time(self): """ Gets the event_time of this V1beta1Event. Required. Time when this Event was first observed. :return: The event_time of this V1beta1Event. 
:rtype: datetime """ return self._event_time @event_time.setter def event_time(self, event_time): """ Sets the event_time of this V1beta1Event. Required. Time when this Event was first observed. :param event_time: The event_time of this V1beta1Event. :type: datetime """ if event_time is None: raise ValueError("Invalid value for `event_time`, must not be `None`") self._event_time = event_time @property def kind(self): """ Gets the kind of this V1beta1Event. Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds :return: The kind of this V1beta1Event. :rtype: str """ return self._kind @kind.setter def kind(self, kind): """ Sets the kind of this V1beta1Event. Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds :param kind: The kind of this V1beta1Event. :type: str """ self._kind = kind @property def metadata(self): """ Gets the metadata of this V1beta1Event. :return: The metadata of this V1beta1Event. :rtype: V1ObjectMeta """ return self._metadata @metadata.setter def metadata(self, metadata): """ Sets the metadata of this V1beta1Event. :param metadata: The metadata of this V1beta1Event. :type: V1ObjectMeta """ self._metadata = metadata @property def note(self): """ Gets the note of this V1beta1Event. Optional. A human-readable description of the status of this operation. Maximal length of the note is 1kB, but libraries should be prepared to handle values up to 64kB. :return: The note of this V1beta1Event. :rtype: str """ return self._note @note.setter def note(self, note): """ Sets the note of this V1beta1Event. Optional. 
A human-readable description of the status of this operation. Maximal length of the note is 1kB, but libraries should be prepared to handle values up to 64kB. :param note: The note of this V1beta1Event. :type: str """ self._note = note @property def reason(self): """ Gets the reason of this V1beta1Event. Why the action was taken. :return: The reason of this V1beta1Event. :rtype: str """ return self._reason @reason.setter def reason(self, reason): """ Sets the reason of this V1beta1Event. Why the action was taken. :param reason: The reason of this V1beta1Event. :type: str """ self._reason = reason @property def regarding(self): """ Gets the regarding of this V1beta1Event. The object this Event is about. In most cases it's an Object reporting controller implements. E.g. ReplicaSetController implements ReplicaSets and this event is emitted because it acts on some changes in a ReplicaSet object. :return: The regarding of this V1beta1Event. :rtype: V1ObjectReference """ return self._regarding @regarding.setter def regarding(self, regarding): """ Sets the regarding of this V1beta1Event. The object this Event is about. In most cases it's an Object reporting controller implements. E.g. ReplicaSetController implements ReplicaSets and this event is emitted because it acts on some changes in a ReplicaSet object. :param regarding: The regarding of this V1beta1Event. :type: V1ObjectReference """ self._regarding = regarding @property def related(self): """ Gets the related of this V1beta1Event. Optional secondary object for more complex actions. E.g. when regarding object triggers a creation or deletion of related object. :return: The related of this V1beta1Event. :rtype: V1ObjectReference """ return self._related @related.setter def related(self, related): """ Sets the related of this V1beta1Event. Optional secondary object for more complex actions. E.g. when regarding object triggers a creation or deletion of related object. :param related: The related of this V1beta1Event. 
:type: V1ObjectReference """ self._related = related @property def reporting_controller(self): """ Gets the reporting_controller of this V1beta1Event. Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`. :return: The reporting_controller of this V1beta1Event. :rtype: str """ return self._reporting_controller @reporting_controller.setter def reporting_controller(self, reporting_controller): """ Sets the reporting_controller of this V1beta1Event. Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`. :param reporting_controller: The reporting_controller of this V1beta1Event. :type: str """ self._reporting_controller = reporting_controller @property def reporting_instance(self): """ Gets the reporting_instance of this V1beta1Event. ID of the controller instance, e.g. `kubelet-xyzf`. :return: The reporting_instance of this V1beta1Event. :rtype: str """ return self._reporting_instance @reporting_instance.setter def reporting_instance(self, reporting_instance): """ Sets the reporting_instance of this V1beta1Event. ID of the controller instance, e.g. `kubelet-xyzf`. :param reporting_instance: The reporting_instance of this V1beta1Event. :type: str """ self._reporting_instance = reporting_instance @property def series(self): """ Gets the series of this V1beta1Event. Data about the Event series this event represents or nil if it's a singleton Event. :return: The series of this V1beta1Event. :rtype: V1beta1EventSeries """ return self._series @series.setter def series(self, series): """ Sets the series of this V1beta1Event. Data about the Event series this event represents or nil if it's a singleton Event. :param series: The series of this V1beta1Event. :type: V1beta1EventSeries """ self._series = series @property def type(self): """ Gets the type of this V1beta1Event. Type of this event (Normal, Warning), new types could be added in the future. :return: The type of this V1beta1Event. 
:rtype: str """ return self._type @type.setter def type(self, type): """ Sets the type of this V1beta1Event. Type of this event (Normal, Warning), new types could be added in the future. :param type: The type of this V1beta1Event. :type: str """ self._type = type def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, V1beta1Event): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
32.149826
339
0.634117
from pprint import pformat from six import iteritems import re class V1beta1Event(object): swagger_types = { 'action': 'str', 'api_version': 'str', 'deprecated_count': 'int', 'deprecated_first_timestamp': 'datetime', 'deprecated_last_timestamp': 'datetime', 'deprecated_source': 'V1EventSource', 'event_time': 'datetime', 'kind': 'str', 'metadata': 'V1ObjectMeta', 'note': 'str', 'reason': 'str', 'regarding': 'V1ObjectReference', 'related': 'V1ObjectReference', 'reporting_controller': 'str', 'reporting_instance': 'str', 'series': 'V1beta1EventSeries', 'type': 'str' } attribute_map = { 'action': 'action', 'api_version': 'apiVersion', 'deprecated_count': 'deprecatedCount', 'deprecated_first_timestamp': 'deprecatedFirstTimestamp', 'deprecated_last_timestamp': 'deprecatedLastTimestamp', 'deprecated_source': 'deprecatedSource', 'event_time': 'eventTime', 'kind': 'kind', 'metadata': 'metadata', 'note': 'note', 'reason': 'reason', 'regarding': 'regarding', 'related': 'related', 'reporting_controller': 'reportingController', 'reporting_instance': 'reportingInstance', 'series': 'series', 'type': 'type' } def __init__(self, action=None, api_version=None, deprecated_count=None, deprecated_first_timestamp=None, deprecated_last_timestamp=None, deprecated_source=None, event_time=None, kind=None, metadata=None, note=None, reason=None, regarding=None, related=None, reporting_controller=None, reporting_instance=None, series=None, type=None): self._action = None self._api_version = None self._deprecated_count = None self._deprecated_first_timestamp = None self._deprecated_last_timestamp = None self._deprecated_source = None self._event_time = None self._kind = None self._metadata = None self._note = None self._reason = None self._regarding = None self._related = None self._reporting_controller = None self._reporting_instance = None self._series = None self._type = None self.discriminator = None if action is not None: self.action = action if api_version is not None: self.api_version = 
api_version if deprecated_count is not None: self.deprecated_count = deprecated_count if deprecated_first_timestamp is not None: self.deprecated_first_timestamp = deprecated_first_timestamp if deprecated_last_timestamp is not None: self.deprecated_last_timestamp = deprecated_last_timestamp if deprecated_source is not None: self.deprecated_source = deprecated_source self.event_time = event_time if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata if note is not None: self.note = note if reason is not None: self.reason = reason if regarding is not None: self.regarding = regarding if related is not None: self.related = related if reporting_controller is not None: self.reporting_controller = reporting_controller if reporting_instance is not None: self.reporting_instance = reporting_instance if series is not None: self.series = series if type is not None: self.type = type @property def action(self): return self._action @action.setter def action(self, action): self._action = action @property def api_version(self): return self._api_version @api_version.setter def api_version(self, api_version): self._api_version = api_version @property def deprecated_count(self): return self._deprecated_count @deprecated_count.setter def deprecated_count(self, deprecated_count): self._deprecated_count = deprecated_count @property def deprecated_first_timestamp(self): return self._deprecated_first_timestamp @deprecated_first_timestamp.setter def deprecated_first_timestamp(self, deprecated_first_timestamp): self._deprecated_first_timestamp = deprecated_first_timestamp @property def deprecated_last_timestamp(self): return self._deprecated_last_timestamp @deprecated_last_timestamp.setter def deprecated_last_timestamp(self, deprecated_last_timestamp): self._deprecated_last_timestamp = deprecated_last_timestamp @property def deprecated_source(self): return self._deprecated_source @deprecated_source.setter def deprecated_source(self, deprecated_source): 
self._deprecated_source = deprecated_source @property def event_time(self): return self._event_time @event_time.setter def event_time(self, event_time): if event_time is None: raise ValueError("Invalid value for `event_time`, must not be `None`") self._event_time = event_time @property def kind(self): return self._kind @kind.setter def kind(self, kind): self._kind = kind @property def metadata(self): return self._metadata @metadata.setter def metadata(self, metadata): self._metadata = metadata @property def note(self): return self._note @note.setter def note(self, note): self._note = note @property def reason(self): return self._reason @reason.setter def reason(self, reason): self._reason = reason @property def regarding(self): return self._regarding @regarding.setter def regarding(self, regarding): self._regarding = regarding @property def related(self): return self._related @related.setter def related(self, related): self._related = related @property def reporting_controller(self): return self._reporting_controller @reporting_controller.setter def reporting_controller(self, reporting_controller): self._reporting_controller = reporting_controller @property def reporting_instance(self): return self._reporting_instance @reporting_instance.setter def reporting_instance(self, reporting_instance): self._reporting_instance = reporting_instance @property def series(self): return self._series @series.setter def series(self, series): self._series = series @property def type(self): return self._type @type.setter def type(self, type): self._type = type def to_dict(self): result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, 
value.items() )) else: result[attr] = value return result def to_str(self): return pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, V1beta1Event): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self == other
true
true
f703e4dad2354c26235f39a5db5e721bba4fa7e8
788
py
Python
Lab2/task[1-3].py
ValeriyMartsyshyn/HelloPython
badcf85bc54e889eed0286b420e5cabe68da8572
[ "MIT" ]
null
null
null
Lab2/task[1-3].py
ValeriyMartsyshyn/HelloPython
badcf85bc54e889eed0286b420e5cabe68da8572
[ "MIT" ]
null
null
null
Lab2/task[1-3].py
ValeriyMartsyshyn/HelloPython
badcf85bc54e889eed0286b420e5cabe68da8572
[ "MIT" ]
null
null
null
#task 1 nyaam = float (input('enter a length in cm: ')) if nyaam < 0: print ('entry is invalid') else: res = nyaam / 2.54 print (res, 'inch') #task 2 whoosh = int (input ('how many credits have you taken? ')) if whoosh > 0 and whoosh < 24: print ('congrats, you a freshman!') elif whoosh > 23 and whoosh < 54: print ('congrats, you a sophomore!') elif whoosh > 53 and whoosh < 84: print ('congrats, you a junior!') elif whoosh > 83: print ('congrats, you a senior!') elif whoosh <= 0: print ('you haven\'t any credits, fool') #task3 from random import randrange jeffry = randrange(10) goat = float (input ('guess the number between 0 n 10: ')) if goat == jeffry: print ('you\'re right!') else: print ('that\'s not it, pal') print (jeffry)
21.888889
58
0.628173
nyaam = float (input('enter a length in cm: ')) if nyaam < 0: print ('entry is invalid') else: res = nyaam / 2.54 print (res, 'inch') whoosh = int (input ('how many credits have you taken? ')) if whoosh > 0 and whoosh < 24: print ('congrats, you a freshman!') elif whoosh > 23 and whoosh < 54: print ('congrats, you a sophomore!') elif whoosh > 53 and whoosh < 84: print ('congrats, you a junior!') elif whoosh > 83: print ('congrats, you a senior!') elif whoosh <= 0: print ('you haven\'t any credits, fool') #task3 from random import randrange jeffry = randrange(10) goat = float (input ('guess the number between 0 n 10: ')) if goat == jeffry: print ('you\'re right!') else: print ('that\'s not it, pal') print (jeffry)
true
true
f703e4f4b3879387dd8d422d53b101ca0ddef7f8
16,603
py
Python
mp/models/continual/model_utils.py
MECLabTUDA/ACS
bb418c5479a3585138c48c63112352f5cc8f64b1
[ "MIT" ]
5
2021-07-20T16:57:14.000Z
2022-03-21T23:52:59.000Z
mp/models/continual/model_utils.py
MECLabTUDA/ACS
bb418c5479a3585138c48c63112352f5cc8f64b1
[ "MIT" ]
null
null
null
mp/models/continual/model_utils.py
MECLabTUDA/ACS
bb418c5479a3585138c48c63112352f5cc8f64b1
[ "MIT" ]
1
2021-07-20T16:57:15.000Z
2021-07-20T16:57:15.000Z
import torch import torch.nn as nn import torch.nn.functional as F from mp.models.segmentation.unet_fepegar import UNet2D ### UNet Wrapper ### class UNet2D_dis(UNet2D): r"""Wrapper for UNet2D to access encoder and decoder seperately. """ def __init__(self, *args, **kwargs): super(UNet2D_dis, self).__init__(*args, **kwargs) def forward_enc(self, x): skip_connections, encoding = self.encoder(x) encoding = self.bottom_block(encoding) return skip_connections, encoding def forward_dec(self, skip_connections, encoding): x = self.decoder(skip_connections, encoding) if self.monte_carlo_layer is not None: x = self.monte_carlo_layer(x) return self.classifier(x) ### MODULES ### class EncoderStyle(nn.Module): r"""Style Encoder (VAE). """ def __init__(self, in_channels): super(EncoderStyle, self).__init__() layers = [] layers += [ConvBlock(in_channels=in_channels, out_channels=256)] layers += [ConvPoolBlock(in_channels=256, out_channels=64, pooling=False)] layers += [ConvPoolBlock(in_channels=64, out_channels=128, pooling=True)] layers += [ConvPoolBlock(in_channels=128, out_channels=128, pooling=False)] layers += [ConvPoolBlock(in_channels=128, out_channels=192, pooling=True)] layers += [ConvPoolBlock(in_channels=192, out_channels=192, pooling=False)] layers += [ConvPoolBlock(in_channels=192, out_channels=256, pooling=True)] global_pool = [nn.LeakyReLU(), nn.AdaptiveMaxPool2d(output_size=(3,3))] self.global_pool = nn.Sequential(*global_pool) self.layers = nn.Sequential(*layers) self.dense_mu = nn.Linear(in_features=3*3*256, out_features=1) self.dense_var = nn.Linear(in_features=3*3*256, out_features=1) def forward(self, x): x = self.layers(x) x = self.global_pool(x) mu = self.dense_mu(x.view(x.shape[0], -1)) log_var = self.dense_var(x.view(x.shape[0], -1)) return [mu, log_var] class LatentScaler(nn.Module): r"""Scales samples from style encoding to be injected into the generator. 
""" def __init__(self, in_features): super(LatentScaler, self).__init__() layers = [nn.Linear(in_features=in_features, out_features=500), nn.LeakyReLU()] layers += [nn.Linear(in_features=500, out_features=1024), nn.LeakyReLU()] for _ in range(0, 2): layers += [nn.Linear(in_features=1024, out_features=1024), nn.LeakyReLU()] layers += [nn.Linear(in_features=1024, out_features=2560), nn.Tanh()] self.layers = nn.Sequential(*layers) def forward(self, x): x = self.layers(x).reshape(x.shape[0],10,-1) # 10 occurences a 256 filters return x class Generator(nn.Module): r"""Generator using content encoding, scaled style encoding (see LatentScaler) and domain_code to generate images. """ def __init__(self, in_channels, out_channels, domain_code_size): super(Generator, self).__init__() layers_BCIN = [ResBlockBCIN(in_channels=in_channels, out_channels=in_channels, layer_id=0, stride=1, padding=1, domain_code_size=domain_code_size)] for i in range(0,4): layers_BCIN += [ResBlockBCIN(in_channels=in_channels, out_channels=in_channels, layer_id=i+1, stride=1, padding=1, domain_code_size=domain_code_size)] layers = [nn.ConvTranspose2d(in_channels=in_channels, out_channels=in_channels, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()] layers += [nn.ConvTranspose2d(in_channels=in_channels, out_channels=128, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()] layers += [nn.ConvTranspose2d(in_channels=128, out_channels=128, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()] layers += [nn.ConvTranspose2d(in_channels=128, out_channels=64, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()] layers += [nn.ConvTranspose2d(in_channels=64, out_channels=out_channels, kernel_size=7, stride=1, padding=3), nn.Sigmoid()] self.layers_BCIN = MultiInSequential(*layers_BCIN) self.layers = nn.Sequential(*layers) def forward(self, content, latent_scale, domain_code): content, latent_scale, domain_code = self.layers_BCIN(content, latent_scale, 
domain_code) x = self.layers(content) return x class DiscriminatorDomain(nn.Module): r"""Domain Discriminator. """ def __init__(self, in_channels, domain_code_size, max_channels=512, kernel_size=4, stride=2): super(DiscriminatorDomain, self).__init__() layers = [ConvBlockBCIN(in_channels=in_channels, out_channels=64, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)] layers += [ConvBlockBCIN(in_channels=64, out_channels=128, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)] layers += [ConvBlockBCIN(in_channels=128, out_channels=max_channels//2, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)] layers += [ConvBlockBCIN(in_channels=max_channels//2, out_channels=max_channels, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)] layers += [ConvBlockBCIN(in_channels=max_channels, out_channels=1, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size, normalization='None')] self.layers = MultiInSequential(*layers) self.linear = nn.Linear(in_features=7**2, out_features=1) self.activation = nn.Sigmoid() def forward(self, x, domain_code): x, domain_code = self.layers(x, domain_code) x = x.view(x.shape[0],-1) x = self.linear(x) return x class DiscriminatorContent(nn.Module): r"""Unet-style Content Discriminator. 
""" def __init__(self, in_channels, domain_code_size, max_channels=512, kernel_size=3, stride=2): super(DiscriminatorContent, self).__init__() self.in_channels = 16 self.in_channels_max = 128 self.out_channels = 32 self.out_channels_max = 256 padding = 1 self.conv_0 = nn.Conv2d(in_channels=self.in_channels, out_channels=self.in_channels*2**1, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_0 = nn.BatchNorm2d(self.in_channels*2**1) self.activation_0 = nn.ReLU() self.conv_1 = nn.Conv2d(in_channels=self.in_channels*2**1, out_channels=self.in_channels*2**2, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_1 = nn.BatchNorm2d(self.in_channels*2**2) self.activation_1 = nn.ReLU() self.conv_2 = nn.Conv2d(in_channels=self.in_channels*2**2, out_channels=self.in_channels*2**3, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_2 = nn.BatchNorm2d(self.in_channels*2**3) self.activation_2 = nn.ReLU() self.conv_3 = nn.Conv2d(in_channels=self.in_channels*2**3, out_channels=self.in_channels*2**4, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_3 = nn.BatchNorm2d(self.in_channels*2**4) self.activation_3 = nn.ReLU() self.conv_4 = nn.Conv2d(in_channels=self.in_channels*2**4, out_channels=1, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_4 = nn.BatchNorm2d(1) self.activation_4 = nn.ReLU() self.dense = nn.Linear(in_features = 8**2, out_features=domain_code_size) self.softmax = nn.Softmax(dim=1) def forward(self, skip_connections, content_x): out = self.conv_0(skip_connections[0]) out = self.norm_0(out) out = self.activation_0(out) out = self.conv_1(skip_connections[1] + out) out = self.norm_1(out) out = self.activation_1(out) out = self.conv_2(skip_connections[2] + out) out = self.norm_2(out) out = self.activation_2(out) out = self.conv_3(skip_connections[3] + out) out = self.norm_3(out) out = self.activation_3(out) out = self.conv_4(content_x + out) out = self.norm_4(out) out = 
self.activation_4(out) out = self.dense(out.reshape(content_x.shape[0], -1)) out = self.softmax(out) return out def center_crop(self, skip_connection, x): skip_shape = torch.tensor(skip_connection.shape) x_shape = torch.tensor(x.shape) crop = skip_shape[2:] - x_shape[2:] half_crop = crop // 2 # If skip_connection is 10, 20, 30 and x is (6, 14, 12) # Then pad will be (-2, -2, -3, -3, -9, -9) pad = -torch.stack((half_crop, half_crop)).t().flatten() skip_connection = F.pad(skip_connection, pad.tolist()) return skip_connection ### BUILDING BLOCKS ### class ConvBlock(nn.Module): r"""Convolutional Block with normalization and activation. """ def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.LeakyReLU, normalization='Instance'): super(ConvBlock, self).__init__() self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.normalization = normalization if self.normalization == 'Instance': self.norm = nn.InstanceNorm2d(num_features=out_channels, affine=False) # not learnable if self.normalization =='BatchNorm': self.norm = nn.BatchNorm2d(num_features=out_channels) self.activation = activation() def forward(self,x): x = self.conv(x) if self.normalization in ['Instance', 'BatchNorm']: x = self.norm(x) x = self.activation(x) return x class ConvPoolBlock(nn.Module): r"""Convolutional Block with normalization, activation and pooling. 
""" def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, pooling=True, activation=nn.LeakyReLU): super(ConvPoolBlock, self).__init__() self.pooling = pooling self.norm= nn.InstanceNorm2d(num_features=out_channels, affine=False) # not learnable self.activation = activation() self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.pool = nn.AvgPool2d(kernel_size=kernel_size) def forward(self, x): x = self.norm(x) x = self.activation(x) x = self.conv(x) if self.pooling: x = self.pool(x) return x class ConvBlockBCIN(nn.Module): r"""Convolutional Block with BCIN normalization and activation. """ def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.LeakyReLU, domain_code_size=10, normalization='BCIN'): super(ConvBlockBCIN, self).__init__() self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.norm = BCIN(out_channels, domain_code_size) # not learnable self.activation = activation() self.normalization = normalization def forward(self, x, domain_code): x = self.conv(x) if self.normalization == 'BCIN': x = self.norm(x, domain_code) x = self.activation(x) return x, domain_code class ResBlockIN(nn.Module): r"""Residual Block consisting of two convolutions with skip connection, instance normalization and activation. 
""" def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.ReLU): super(ResBlockIN, self).__init__() self.conv0 = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.conv1 = nn.Conv2d(in_channels=out_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.norm0 = nn.InstanceNorm2d(num_features=out_channels, affine=False) # not learnable self.norm1 = nn.InstanceNorm2d(num_features=out_channels, affine=False) # not learnable self.activation = activation() def forward(self, x): x_in = x x = self.conv0(x) x = self.norm0(x) x = self.activation(x) x = self.conv1(x) x = self.norm1(x) x += self.center_crop(x_in, x) return x def center_crop(self, skip_connection, x): skip_shape = torch.tensor(skip_connection.shape) x_shape = torch.tensor(x.shape) crop = skip_shape[2:] - x_shape[2:] half_crop = crop // 2 # If skip_connection is 10, 20, 30 and x is (6, 14, 12) # Then pad will be (-2, -2, -3, -3, -9, -9) pad = -torch.stack((half_crop, half_crop)).t().flatten() skip_connection = F.pad(skip_connection, pad.tolist()) return skip_connection class ResBlockBCIN(nn.Module): r"""Residual Block consisting of two convolutions with skip connection, BCIN normalization and activation. 
""" def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.ReLU, domain_code_size=10, layer_id=0): super(ResBlockBCIN, self).__init__() self.conv0 = nn.ConvTranspose2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.conv1 = nn.ConvTranspose2d(in_channels=out_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.norm0 = BCIN(num_features=out_channels, domain_code_size=domain_code_size, affine=True) # learnable self.norm1 = BCIN(num_features=out_channels, domain_code_size=domain_code_size, affine=True) # learnable self.activation = activation() self.layer_id = layer_id def forward(self, x, latent_scale, domain_code): x_in = x x = self.conv0(x) x = torch.mul(x, latent_scale[:,self.layer_id*2,:][:,:,None,None]) x = self.norm0(x, domain_code) x = self.activation(x) x = self.conv1(x) x = torch.mul(x, latent_scale[:,self.layer_id*2+1,:][:,:,None,None]) x = self.norm1(x, domain_code) x += self.center_crop(x_in, x) return x, latent_scale, domain_code def center_crop(self, skip_connection, x): skip_shape = torch.tensor(skip_connection.shape) x_shape = torch.tensor(x.shape) crop = skip_shape[2:] - x_shape[2:] half_crop = crop // 2 # If skip_connection is 10, 20, 30 and x is (6, 14, 12) # Then pad will be (-2, -2, -3, -3, -9, -9) pad = -torch.stack((half_crop, half_crop)).t().flatten() skip_connection = F.pad(skip_connection, pad.tolist()) return skip_connection ### NORMALIZATION ### class BCIN(nn.Module): r"""Central Biasing Instance Normalization https://arxiv.org/abs/1806.10050 """ def __init__(self, num_features, domain_code_size, affine=True, instance_norm=False, batch_norm=False): super(BCIN, self).__init__() self.W = nn.Parameter(torch.rand(domain_code_size), requires_grad=affine) self.b = nn.Parameter(torch.rand(1), requires_grad=affine) self.activation = nn.Tanh() self.instance_norm = instance_norm if 
self.instance_norm: print('Using instance_norm instead of BCIN') self.i_norm = torch.nn.InstanceNorm2d(num_features=num_features) self.batch_norm = batch_norm if self.instance_norm: print('Using batch_norm instead of BCIN') self.b_norm = torch.nn.BatchNorm2d(num_features=num_features) def forward(self, x, domain_code): x_var = torch.sqrt(torch.var(x, (1,2,3))) # instance std x_mean = torch.mean(x, (1,2,3)) # instance mean bias = torch.matmul(domain_code, self.W) * self.b bias_scaled = self.activation(bias) if self.instance_norm: return self.i_norm(x) if self.batch_norm: return self.b_norm(x) return ((x-x_mean[:,None,None,None]) / x_var[:,None,None,None]) + bias_scaled[:,None,None,None] ### HELPER MODULES ### class MultiInSequential(nn.Sequential): r"""Sequential class that allows multiple inputs for forward function """ def forward(self, *input): for module in self._modules.values(): input = module(*input) return input
45.487671
172
0.671565
import torch import torch.nn as nn import torch.nn.functional as F from mp.models.segmentation.unet_fepegar import UNet2D class UNet2D_dis(UNet2D): def __init__(self, *args, **kwargs): super(UNet2D_dis, self).__init__(*args, **kwargs) def forward_enc(self, x): skip_connections, encoding = self.encoder(x) encoding = self.bottom_block(encoding) return skip_connections, encoding def forward_dec(self, skip_connections, encoding): x = self.decoder(skip_connections, encoding) if self.monte_carlo_layer is not None: x = self.monte_carlo_layer(x) return self.classifier(x) class EncoderStyle(nn.Module): def __init__(self, in_channels): super(EncoderStyle, self).__init__() layers = [] layers += [ConvBlock(in_channels=in_channels, out_channels=256)] layers += [ConvPoolBlock(in_channels=256, out_channels=64, pooling=False)] layers += [ConvPoolBlock(in_channels=64, out_channels=128, pooling=True)] layers += [ConvPoolBlock(in_channels=128, out_channels=128, pooling=False)] layers += [ConvPoolBlock(in_channels=128, out_channels=192, pooling=True)] layers += [ConvPoolBlock(in_channels=192, out_channels=192, pooling=False)] layers += [ConvPoolBlock(in_channels=192, out_channels=256, pooling=True)] global_pool = [nn.LeakyReLU(), nn.AdaptiveMaxPool2d(output_size=(3,3))] self.global_pool = nn.Sequential(*global_pool) self.layers = nn.Sequential(*layers) self.dense_mu = nn.Linear(in_features=3*3*256, out_features=1) self.dense_var = nn.Linear(in_features=3*3*256, out_features=1) def forward(self, x): x = self.layers(x) x = self.global_pool(x) mu = self.dense_mu(x.view(x.shape[0], -1)) log_var = self.dense_var(x.view(x.shape[0], -1)) return [mu, log_var] class LatentScaler(nn.Module): def __init__(self, in_features): super(LatentScaler, self).__init__() layers = [nn.Linear(in_features=in_features, out_features=500), nn.LeakyReLU()] layers += [nn.Linear(in_features=500, out_features=1024), nn.LeakyReLU()] for _ in range(0, 2): layers += [nn.Linear(in_features=1024, out_features=1024), 
nn.LeakyReLU()] layers += [nn.Linear(in_features=1024, out_features=2560), nn.Tanh()] self.layers = nn.Sequential(*layers) def forward(self, x): x = self.layers(x).reshape(x.shape[0],10,-1) return x class Generator(nn.Module): def __init__(self, in_channels, out_channels, domain_code_size): super(Generator, self).__init__() layers_BCIN = [ResBlockBCIN(in_channels=in_channels, out_channels=in_channels, layer_id=0, stride=1, padding=1, domain_code_size=domain_code_size)] for i in range(0,4): layers_BCIN += [ResBlockBCIN(in_channels=in_channels, out_channels=in_channels, layer_id=i+1, stride=1, padding=1, domain_code_size=domain_code_size)] layers = [nn.ConvTranspose2d(in_channels=in_channels, out_channels=in_channels, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()] layers += [nn.ConvTranspose2d(in_channels=in_channels, out_channels=128, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()] layers += [nn.ConvTranspose2d(in_channels=128, out_channels=128, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()] layers += [nn.ConvTranspose2d(in_channels=128, out_channels=64, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()] layers += [nn.ConvTranspose2d(in_channels=64, out_channels=out_channels, kernel_size=7, stride=1, padding=3), nn.Sigmoid()] self.layers_BCIN = MultiInSequential(*layers_BCIN) self.layers = nn.Sequential(*layers) def forward(self, content, latent_scale, domain_code): content, latent_scale, domain_code = self.layers_BCIN(content, latent_scale, domain_code) x = self.layers(content) return x class DiscriminatorDomain(nn.Module): def __init__(self, in_channels, domain_code_size, max_channels=512, kernel_size=4, stride=2): super(DiscriminatorDomain, self).__init__() layers = [ConvBlockBCIN(in_channels=in_channels, out_channels=64, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)] layers += [ConvBlockBCIN(in_channels=64, out_channels=128, kernel_size=kernel_size, stride=stride, 
domain_code_size=domain_code_size)] layers += [ConvBlockBCIN(in_channels=128, out_channels=max_channels//2, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)] layers += [ConvBlockBCIN(in_channels=max_channels//2, out_channels=max_channels, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)] layers += [ConvBlockBCIN(in_channels=max_channels, out_channels=1, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size, normalization='None')] self.layers = MultiInSequential(*layers) self.linear = nn.Linear(in_features=7**2, out_features=1) self.activation = nn.Sigmoid() def forward(self, x, domain_code): x, domain_code = self.layers(x, domain_code) x = x.view(x.shape[0],-1) x = self.linear(x) return x class DiscriminatorContent(nn.Module): def __init__(self, in_channels, domain_code_size, max_channels=512, kernel_size=3, stride=2): super(DiscriminatorContent, self).__init__() self.in_channels = 16 self.in_channels_max = 128 self.out_channels = 32 self.out_channels_max = 256 padding = 1 self.conv_0 = nn.Conv2d(in_channels=self.in_channels, out_channels=self.in_channels*2**1, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_0 = nn.BatchNorm2d(self.in_channels*2**1) self.activation_0 = nn.ReLU() self.conv_1 = nn.Conv2d(in_channels=self.in_channels*2**1, out_channels=self.in_channels*2**2, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_1 = nn.BatchNorm2d(self.in_channels*2**2) self.activation_1 = nn.ReLU() self.conv_2 = nn.Conv2d(in_channels=self.in_channels*2**2, out_channels=self.in_channels*2**3, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_2 = nn.BatchNorm2d(self.in_channels*2**3) self.activation_2 = nn.ReLU() self.conv_3 = nn.Conv2d(in_channels=self.in_channels*2**3, out_channels=self.in_channels*2**4, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_3 = nn.BatchNorm2d(self.in_channels*2**4) self.activation_3 = nn.ReLU() 
self.conv_4 = nn.Conv2d(in_channels=self.in_channels*2**4, out_channels=1, kernel_size=kernel_size, stride=stride, padding=padding) self.norm_4 = nn.BatchNorm2d(1) self.activation_4 = nn.ReLU() self.dense = nn.Linear(in_features = 8**2, out_features=domain_code_size) self.softmax = nn.Softmax(dim=1) def forward(self, skip_connections, content_x): out = self.conv_0(skip_connections[0]) out = self.norm_0(out) out = self.activation_0(out) out = self.conv_1(skip_connections[1] + out) out = self.norm_1(out) out = self.activation_1(out) out = self.conv_2(skip_connections[2] + out) out = self.norm_2(out) out = self.activation_2(out) out = self.conv_3(skip_connections[3] + out) out = self.norm_3(out) out = self.activation_3(out) out = self.conv_4(content_x + out) out = self.norm_4(out) out = self.activation_4(out) out = self.dense(out.reshape(content_x.shape[0], -1)) out = self.softmax(out) return out def center_crop(self, skip_connection, x): skip_shape = torch.tensor(skip_connection.shape) x_shape = torch.tensor(x.shape) crop = skip_shape[2:] - x_shape[2:] half_crop = crop // 2 pad = -torch.stack((half_crop, half_crop)).t().flatten() skip_connection = F.pad(skip_connection, pad.tolist()) return skip_connection class ConvBlock(nn.Module): def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.LeakyReLU, normalization='Instance'): super(ConvBlock, self).__init__() self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.normalization = normalization if self.normalization == 'Instance': self.norm = nn.InstanceNorm2d(num_features=out_channels, affine=False) if self.normalization =='BatchNorm': self.norm = nn.BatchNorm2d(num_features=out_channels) self.activation = activation() def forward(self,x): x = self.conv(x) if self.normalization in ['Instance', 'BatchNorm']: x = self.norm(x) x = self.activation(x) return x class ConvPoolBlock(nn.Module): def 
__init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, pooling=True, activation=nn.LeakyReLU): super(ConvPoolBlock, self).__init__() self.pooling = pooling self.norm= nn.InstanceNorm2d(num_features=out_channels, affine=False) self.activation = activation() self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.pool = nn.AvgPool2d(kernel_size=kernel_size) def forward(self, x): x = self.norm(x) x = self.activation(x) x = self.conv(x) if self.pooling: x = self.pool(x) return x class ConvBlockBCIN(nn.Module): def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.LeakyReLU, domain_code_size=10, normalization='BCIN'): super(ConvBlockBCIN, self).__init__() self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.norm = BCIN(out_channels, domain_code_size) self.activation = activation() self.normalization = normalization def forward(self, x, domain_code): x = self.conv(x) if self.normalization == 'BCIN': x = self.norm(x, domain_code) x = self.activation(x) return x, domain_code class ResBlockIN(nn.Module): def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.ReLU): super(ResBlockIN, self).__init__() self.conv0 = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.conv1 = nn.Conv2d(in_channels=out_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.norm0 = nn.InstanceNorm2d(num_features=out_channels, affine=False) self.norm1 = nn.InstanceNorm2d(num_features=out_channels, affine=False) self.activation = activation() def forward(self, x): x_in = x x = self.conv0(x) x = self.norm0(x) x = self.activation(x) x = self.conv1(x) x = self.norm1(x) x += self.center_crop(x_in, x) 
return x def center_crop(self, skip_connection, x): skip_shape = torch.tensor(skip_connection.shape) x_shape = torch.tensor(x.shape) crop = skip_shape[2:] - x_shape[2:] half_crop = crop // 2 pad = -torch.stack((half_crop, half_crop)).t().flatten() skip_connection = F.pad(skip_connection, pad.tolist()) return skip_connection class ResBlockBCIN(nn.Module): def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.ReLU, domain_code_size=10, layer_id=0): super(ResBlockBCIN, self).__init__() self.conv0 = nn.ConvTranspose2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.conv1 = nn.ConvTranspose2d(in_channels=out_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding) self.norm0 = BCIN(num_features=out_channels, domain_code_size=domain_code_size, affine=True) self.norm1 = BCIN(num_features=out_channels, domain_code_size=domain_code_size, affine=True) self.activation = activation() self.layer_id = layer_id def forward(self, x, latent_scale, domain_code): x_in = x x = self.conv0(x) x = torch.mul(x, latent_scale[:,self.layer_id*2,:][:,:,None,None]) x = self.norm0(x, domain_code) x = self.activation(x) x = self.conv1(x) x = torch.mul(x, latent_scale[:,self.layer_id*2+1,:][:,:,None,None]) x = self.norm1(x, domain_code) x += self.center_crop(x_in, x) return x, latent_scale, domain_code def center_crop(self, skip_connection, x): skip_shape = torch.tensor(skip_connection.shape) x_shape = torch.tensor(x.shape) crop = skip_shape[2:] - x_shape[2:] half_crop = crop // 2 pad = -torch.stack((half_crop, half_crop)).t().flatten() skip_connection = F.pad(skip_connection, pad.tolist()) return skip_connection class BCIN(nn.Module): def __init__(self, num_features, domain_code_size, affine=True, instance_norm=False, batch_norm=False): super(BCIN, self).__init__() self.W = nn.Parameter(torch.rand(domain_code_size), requires_grad=affine) self.b 
= nn.Parameter(torch.rand(1), requires_grad=affine) self.activation = nn.Tanh() self.instance_norm = instance_norm if self.instance_norm: print('Using instance_norm instead of BCIN') self.i_norm = torch.nn.InstanceNorm2d(num_features=num_features) self.batch_norm = batch_norm if self.instance_norm: print('Using batch_norm instead of BCIN') self.b_norm = torch.nn.BatchNorm2d(num_features=num_features) def forward(self, x, domain_code): x_var = torch.sqrt(torch.var(x, (1,2,3))) x_mean = torch.mean(x, (1,2,3)) bias = torch.matmul(domain_code, self.W) * self.b bias_scaled = self.activation(bias) if self.instance_norm: return self.i_norm(x) if self.batch_norm: return self.b_norm(x) return ((x-x_mean[:,None,None,None]) / x_var[:,None,None,None]) + bias_scaled[:,None,None,None] class MultiInSequential(nn.Sequential): def forward(self, *input): for module in self._modules.values(): input = module(*input) return input
true
true
f703e668f221af1cbe47550132535735de0842cf
4,121
py
Python
e2xgrader/server_extensions/assignment_list/handlers.py
divindevaiah/e2xgrader
19eb4662e4eee5ddef673097517e4bd4fb469e62
[ "MIT" ]
2
2021-10-02T10:48:47.000Z
2022-03-02T14:00:48.000Z
e2xgrader/server_extensions/assignment_list/handlers.py
divindevaiah/e2xgrader
19eb4662e4eee5ddef673097517e4bd4fb469e62
[ "MIT" ]
70
2020-10-23T16:42:01.000Z
2022-03-14T16:33:54.000Z
e2xgrader/server_extensions/assignment_list/handlers.py
divindevaiah/e2xgrader
19eb4662e4eee5ddef673097517e4bd4fb469e62
[ "MIT" ]
10
2020-11-22T16:36:16.000Z
2022-03-02T15:51:24.000Z
"""Tornado handlers for nbgrader assignment list web service.""" import os import json import contextlib import traceback from tornado import web from notebook.utils import url_path_join as ujoin from nbgrader.exchange import ExchangeFactory from nbgrader.coursedir import CourseDirectory from nbgrader.auth import Authenticator from nbgrader.server_extensions.assignment_list.handlers import ( AssignmentList, default_handlers, BaseAssignmentHandler, ) static = os.path.join(os.path.dirname(__file__), "static") @contextlib.contextmanager def chdir(dirname): currdir = os.getcwd() os.chdir(dirname) yield os.chdir(currdir) class E2xAssignmentList(AssignmentList): def submit_assignment(self, course_id, assignment_id): with self.get_assignment_dir_config() as config: try: config = self.load_config() config.CourseDirectory.course_id = course_id config.CourseDirectory.assignment_id = assignment_id coursedir = CourseDirectory(config=config) authenticator = Authenticator(config=config) submit = ExchangeFactory(config=config).Submit( coursedir=coursedir, authenticator=authenticator, config=config ) retval = submit.start() hashcode = "Exchange not set up for hashcode" timestamp = "Exchange not set up for timestamp" if retval and len(retval) == 2: hashcode, timestamp = retval except Exception: self.log.error(traceback.format_exc()) retvalue = {"success": False, "value": traceback.format_exc()} else: retvalue = { "success": True, "hashcode": hashcode, "timestamp": timestamp, } self.log.info(retvalue) return retvalue class AssignmentActionHandler(BaseAssignmentHandler): @web.authenticated def post(self, action): if action == "fetch": assignment_id = self.get_argument("assignment_id") course_id = self.get_argument("course_id") self.manager.fetch_assignment(course_id, assignment_id) self.finish(json.dumps(self.manager.list_assignments(course_id=course_id))) elif action == "submit": assignment_id = self.get_argument("assignment_id") course_id = self.get_argument("course_id") output = 
self.manager.submit_assignment(course_id, assignment_id) if output["success"]: response = self.manager.list_assignments(course_id=course_id) response["hashcode"] = output["hashcode"] response["timestamp"] = output["timestamp"] self.finish(json.dumps(response)) else: self.finish(json.dumps(output)) elif action == "fetch_feedback": assignment_id = self.get_argument("assignment_id") course_id = self.get_argument("course_id") self.manager.fetch_feedback(course_id, assignment_id) self.finish(json.dumps(self.manager.list_assignments(course_id=course_id))) # ----------------------------------------------------------------------------- # URL to handler mappings # ----------------------------------------------------------------------------- _assignment_action_regex = r"(?P<action>fetch|submit|fetch_feedback)" e2x_default_handlers = [ (r"/assignments/%s" % _assignment_action_regex, AssignmentActionHandler), ] def load_jupyter_server_extension(nbapp): """Load the nbserver""" nbapp.log.info("Loading the assignment_list e2xgrader serverextension") webapp = nbapp.web_app webapp.settings["assignment_list_manager"] = E2xAssignmentList(parent=nbapp) base_url = webapp.settings["base_url"] webapp.add_handlers( ".*$", [ (ujoin(base_url, pat), handler) for pat, handler in e2x_default_handlers + default_handlers ], )
34.341667
87
0.61587
import os import json import contextlib import traceback from tornado import web from notebook.utils import url_path_join as ujoin from nbgrader.exchange import ExchangeFactory from nbgrader.coursedir import CourseDirectory from nbgrader.auth import Authenticator from nbgrader.server_extensions.assignment_list.handlers import ( AssignmentList, default_handlers, BaseAssignmentHandler, ) static = os.path.join(os.path.dirname(__file__), "static") @contextlib.contextmanager def chdir(dirname): currdir = os.getcwd() os.chdir(dirname) yield os.chdir(currdir) class E2xAssignmentList(AssignmentList): def submit_assignment(self, course_id, assignment_id): with self.get_assignment_dir_config() as config: try: config = self.load_config() config.CourseDirectory.course_id = course_id config.CourseDirectory.assignment_id = assignment_id coursedir = CourseDirectory(config=config) authenticator = Authenticator(config=config) submit = ExchangeFactory(config=config).Submit( coursedir=coursedir, authenticator=authenticator, config=config ) retval = submit.start() hashcode = "Exchange not set up for hashcode" timestamp = "Exchange not set up for timestamp" if retval and len(retval) == 2: hashcode, timestamp = retval except Exception: self.log.error(traceback.format_exc()) retvalue = {"success": False, "value": traceback.format_exc()} else: retvalue = { "success": True, "hashcode": hashcode, "timestamp": timestamp, } self.log.info(retvalue) return retvalue class AssignmentActionHandler(BaseAssignmentHandler): @web.authenticated def post(self, action): if action == "fetch": assignment_id = self.get_argument("assignment_id") course_id = self.get_argument("course_id") self.manager.fetch_assignment(course_id, assignment_id) self.finish(json.dumps(self.manager.list_assignments(course_id=course_id))) elif action == "submit": assignment_id = self.get_argument("assignment_id") course_id = self.get_argument("course_id") output = self.manager.submit_assignment(course_id, assignment_id) if 
output["success"]: response = self.manager.list_assignments(course_id=course_id) response["hashcode"] = output["hashcode"] response["timestamp"] = output["timestamp"] self.finish(json.dumps(response)) else: self.finish(json.dumps(output)) elif action == "fetch_feedback": assignment_id = self.get_argument("assignment_id") course_id = self.get_argument("course_id") self.manager.fetch_feedback(course_id, assignment_id) self.finish(json.dumps(self.manager.list_assignments(course_id=course_id))) _assignment_action_regex = r"(?P<action>fetch|submit|fetch_feedback)" e2x_default_handlers = [ (r"/assignments/%s" % _assignment_action_regex, AssignmentActionHandler), ] def load_jupyter_server_extension(nbapp): nbapp.log.info("Loading the assignment_list e2xgrader serverextension") webapp = nbapp.web_app webapp.settings["assignment_list_manager"] = E2xAssignmentList(parent=nbapp) base_url = webapp.settings["base_url"] webapp.add_handlers( ".*$", [ (ujoin(base_url, pat), handler) for pat, handler in e2x_default_handlers + default_handlers ], )
true
true
f703e74b18a0079809bb062fa0b290421b09da2b
1,815
py
Python
os_xenapi/client/exception.py
mail2nsrajesh/os-xenapi
7ac84ec31dd1df6af90b2bf333253848cb4de2c3
[ "Apache-2.0" ]
null
null
null
os_xenapi/client/exception.py
mail2nsrajesh/os-xenapi
7ac84ec31dd1df6af90b2bf333253848cb4de2c3
[ "Apache-2.0" ]
null
null
null
os_xenapi/client/exception.py
mail2nsrajesh/os-xenapi
7ac84ec31dd1df6af90b2bf333253848cb4de2c3
[ "Apache-2.0" ]
null
null
null
# Copyright 2016 Citrix Systems # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from os_xenapi.client.i18n import _ class OsXenApiException(Exception): """Base OsXenapi Exception To correctly use this class, inherit from it and define a 'msg_fmt' property. That msg_fmt will get printf'd with the keyword arguments provided to the constructor. """ msg_fmt = _("An unknown exception occurred.") code = 500 def __init__(self, message=None, **kwargs): self.kwargs = kwargs if 'code' not in self.kwargs: try: self.kwargs['code'] = self.code except AttributeError: pass if not message: message = self.msg_fmt % kwargs self.message = message super(OsXenApiException, self).__init__(message) def format_message(self): # NOTE(mrodden): use the first argument to the python Exception object # which should be our full NovaException message, (see __init__) return self.args[0] class PluginRetriesExceeded(OsXenApiException): msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.") class SessionLoginTimeout(OsXenApiException): msg_fmt = _("Unable to log in to XenAPI (is the Dom0 disk full?)")
32.410714
78
0.680441
from os_xenapi.client.i18n import _ class OsXenApiException(Exception): msg_fmt = _("An unknown exception occurred.") code = 500 def __init__(self, message=None, **kwargs): self.kwargs = kwargs if 'code' not in self.kwargs: try: self.kwargs['code'] = self.code except AttributeError: pass if not message: message = self.msg_fmt % kwargs self.message = message super(OsXenApiException, self).__init__(message) def format_message(self): return self.args[0] class PluginRetriesExceeded(OsXenApiException): msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.") class SessionLoginTimeout(OsXenApiException): msg_fmt = _("Unable to log in to XenAPI (is the Dom0 disk full?)")
true
true
f703e984390443eba9c7a013798f01dd9ac3fa6f
2,924
py
Python
dataset/7scenes-export/7scenes-export-color.py
hashi0203/deep-video-mvs
b3943a9249d522dca3e6cd603e427f611cc7bad5
[ "MIT" ]
null
null
null
dataset/7scenes-export/7scenes-export-color.py
hashi0203/deep-video-mvs
b3943a9249d522dca3e6cd603e427f611cc7bad5
[ "MIT" ]
null
null
null
dataset/7scenes-export/7scenes-export-color.py
hashi0203/deep-video-mvs
b3943a9249d522dca3e6cd603e427f611cc7bad5
[ "MIT" ]
null
null
null
import os import shutil from multiprocessing.pool import Pool import cv2 import numpy as np from functools import partial from path import Path def process_scene(input_directory, output_folder): K = np.array([[525.0, 0.0, 320.0], [0.0, 525.0, 240.0], [0.0, 0.0, 1.0]]) print("processing", input_directory) image_filenames = sorted(input_directory.files("*color.png")) pose_filenames = sorted(input_directory.files("*pose.txt")) poses = [] for pose_filename in pose_filenames: pose = np.loadtxt(pose_filename) poses.append(pose) scene = input_directory.split("/")[-2] seq = input_directory.split("/")[-1] current_output_dir = output_folder / scene + "-" + seq if os.path.isdir(current_output_dir): if os.path.exists("{}/poses.txt".format(current_output_dir)) and os.path.exists("{}/K.txt".format(current_output_dir)): return scene else: shutil.rmtree(current_output_dir) os.mkdir(current_output_dir) os.mkdir(os.path.join(current_output_dir, "images")) output_poses = [] for current_index in range(len(image_filenames)): image = cv2.imread(image_filenames[current_index]) output_poses.append(poses[current_index].ravel().tolist()) cv2.imwrite("{}/images/{}.png".format(current_output_dir, str(current_index).zfill(6)), image, [cv2.IMWRITE_PNG_COMPRESSION, 3]) output_poses = np.array(output_poses) np.savetxt("{}/poses.txt".format(current_output_dir), output_poses) np.savetxt("{}/K.txt".format(current_output_dir), K) return scene def main(): input_folder = Path("/home/share/dataset/7scenes") output_folder = Path("/home/nhsmt1123/master-thesis/deep-video-mvs/data/7scenes") input_directories = [ input_folder / "redkitchen/seq-01", input_folder / "redkitchen/seq-07", input_folder / "chess/seq-01", input_folder / "chess/seq-02", input_folder / "heads/seq-02", input_folder / "fire/seq-01", input_folder / "fire/seq-02", input_folder / "office/seq-01", input_folder / "office/seq-03", input_folder / "pumpkin/seq-03", input_folder / "pumpkin/seq-06", input_folder / "stairs/seq-02", input_folder / 
"stairs/seq-06", # train input_folder / "redkitchen/seq-03", input_folder / "chess/seq-03", input_folder / "heads/seq-01", input_folder / "fire/seq-03", input_folder / "fire/seq-04", input_folder / "office/seq-02", input_folder / "pumpkin/seq-01", input_folder / "stairs/seq-01"] # test pool = Pool(6) for finished_scene in pool.imap_unordered(partial(process_scene, output_folder=output_folder), input_directories): print("finished", finished_scene) pool.join() pool.close() if __name__ == '__main__': main()
33.227273
136
0.647059
import os import shutil from multiprocessing.pool import Pool import cv2 import numpy as np from functools import partial from path import Path def process_scene(input_directory, output_folder): K = np.array([[525.0, 0.0, 320.0], [0.0, 525.0, 240.0], [0.0, 0.0, 1.0]]) print("processing", input_directory) image_filenames = sorted(input_directory.files("*color.png")) pose_filenames = sorted(input_directory.files("*pose.txt")) poses = [] for pose_filename in pose_filenames: pose = np.loadtxt(pose_filename) poses.append(pose) scene = input_directory.split("/")[-2] seq = input_directory.split("/")[-1] current_output_dir = output_folder / scene + "-" + seq if os.path.isdir(current_output_dir): if os.path.exists("{}/poses.txt".format(current_output_dir)) and os.path.exists("{}/K.txt".format(current_output_dir)): return scene else: shutil.rmtree(current_output_dir) os.mkdir(current_output_dir) os.mkdir(os.path.join(current_output_dir, "images")) output_poses = [] for current_index in range(len(image_filenames)): image = cv2.imread(image_filenames[current_index]) output_poses.append(poses[current_index].ravel().tolist()) cv2.imwrite("{}/images/{}.png".format(current_output_dir, str(current_index).zfill(6)), image, [cv2.IMWRITE_PNG_COMPRESSION, 3]) output_poses = np.array(output_poses) np.savetxt("{}/poses.txt".format(current_output_dir), output_poses) np.savetxt("{}/K.txt".format(current_output_dir), K) return scene def main(): input_folder = Path("/home/share/dataset/7scenes") output_folder = Path("/home/nhsmt1123/master-thesis/deep-video-mvs/data/7scenes") input_directories = [ input_folder / "redkitchen/seq-01", input_folder / "redkitchen/seq-07", input_folder / "chess/seq-01", input_folder / "chess/seq-02", input_folder / "heads/seq-02", input_folder / "fire/seq-01", input_folder / "fire/seq-02", input_folder / "office/seq-01", input_folder / "office/seq-03", input_folder / "pumpkin/seq-03", input_folder / "pumpkin/seq-06", input_folder / "stairs/seq-02", input_folder / 
"stairs/seq-06", input_folder / "redkitchen/seq-03", input_folder / "chess/seq-03", input_folder / "heads/seq-01", input_folder / "fire/seq-03", input_folder / "fire/seq-04", input_folder / "office/seq-02", input_folder / "pumpkin/seq-01", input_folder / "stairs/seq-01"] pool = Pool(6) for finished_scene in pool.imap_unordered(partial(process_scene, output_folder=output_folder), input_directories): print("finished", finished_scene) pool.join() pool.close() if __name__ == '__main__': main()
true
true
f703e990bc149d473a221b9a2f1254c9d2a428c5
891
py
Python
interactive_map.py
webmsgr/OneLifeMapper
9c94f2820c8605372ff4127f8fb2dfd411db3388
[ "MIT" ]
null
null
null
interactive_map.py
webmsgr/OneLifeMapper
9c94f2820c8605372ff4127f8fb2dfd411db3388
[ "MIT" ]
2
2019-10-18T19:19:04.000Z
2019-10-18T19:59:09.000Z
interactive_map.py
webmsgr/OneLifeMapper
9c94f2820c8605372ff4127f8fb2dfd411db3388
[ "MIT" ]
null
null
null
import pygame import math import glob import os tilesize = 128 # pixels per tile def tiletosurface(tile): pass def maptosurface(sx,sy,ex,ey,oholmap): pass def main(windowsize,tilepipe,OHOLMap): wt = math.floor(windowsize/tilesize) cx,cy,first = 0,0,True if OHOLMap.data != {}: for x in OHOLMap.data: for y in OHOLMap.data[x]: if not first: break cx,cy = x,y first = False print("Loading sprites") sprites = glob.glob("./OneLifeData/sprites/*.tga") loadedsprites = {} print("Found {} sprites, loading...".format(len(sprites))) for sprite in sprites: spriteid = os.path.basename(sprite).split(".")[0] loadedsprites[spriteid] = pygame.image.load(sprite) # do other loading things... tilepipe.send("READY") # main loop goes here
26.205882
62
0.597082
import pygame import math import glob import os tilesize = 128 def tiletosurface(tile): pass def maptosurface(sx,sy,ex,ey,oholmap): pass def main(windowsize,tilepipe,OHOLMap): wt = math.floor(windowsize/tilesize) cx,cy,first = 0,0,True if OHOLMap.data != {}: for x in OHOLMap.data: for y in OHOLMap.data[x]: if not first: break cx,cy = x,y first = False print("Loading sprites") sprites = glob.glob("./OneLifeData/sprites/*.tga") loadedsprites = {} print("Found {} sprites, loading...".format(len(sprites))) for sprite in sprites: spriteid = os.path.basename(sprite).split(".")[0] loadedsprites[spriteid] = pygame.image.load(sprite) tilepipe.send("READY")
true
true
f703e9c0d1a0c2a1c793b2889510d7c6d314ac38
58,491
py
Python
newscout_web/api/v1/views.py
Arch-020/newscout_web
76b3885f27a346806baf60f5d9a72931140f9e78
[ "Apache-2.0" ]
3
2019-10-30T07:15:59.000Z
2021-12-26T20:59:05.000Z
newscout_web/api/v1/views.py
Arch-020/newscout_web
76b3885f27a346806baf60f5d9a72931140f9e78
[ "Apache-2.0" ]
322
2019-10-30T07:12:36.000Z
2022-02-10T10:55:32.000Z
newscout_web/api/v1/views.py
Arch-020/newscout_web
76b3885f27a346806baf60f5d9a72931140f9e78
[ "Apache-2.0" ]
7
2019-10-30T13:34:54.000Z
2021-12-27T12:08:07.000Z
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.http import Http404 from core.models import (Category, Article, Source, BaseUserProfile, BookmarkArticle, ArticleLike, HashTag, Menu, Notification, Devices, SocialAccount, Category, CategoryAssociation, TrendingArticle, Domain, DailyDigest, DraftMedia, Comment, Subscription) from rest_framework.authtoken.models import Token from rest_framework.views import APIView from .serializers import (CategorySerializer, ArticleSerializer, UserSerializer, SourceSerializer, LoginUserSerializer, BaseUserProfileSerializer, BookmarkArticleSerializer, ArticleLikeSerializer, HashTagSerializer, MenuSerializer, NotificationSerializer, TrendingArticleSerializer, ArticleCreateUpdateSerializer, DraftMediaSerializer, CommentSerializer, CommentListSerializer, SubsMediaSerializer, UserProfileSerializer) from rest_framework.response import Response from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework import filters from newscout_web.constants import SOCIAL_AUTH_PROVIDERS from django.db.models import Q from rest_framework.exceptions import APIException from collections import OrderedDict from rest_framework import generics, viewsets from rest_framework.pagination import CursorPagination from rest_framework.generics import ListAPIView from rest_framework.parsers import JSONParser from django.core.mail import EmailMultiAlternatives from django.conf import settings from datetime import datetime, timedelta from django.db.models import Count, Max, Min import pytz import uuid from core.utils import es, ingest_to_elastic, delete_from_elastic from elasticsearch_dsl import Search import math from rest_framework.utils.urls import replace_query_param from google.auth.transport import requests as grequests from google.oauth2 import id_token import facebook from .exception_handler import (create_error_response, TokenIDMissing, ProviderMissing, SocialAuthTokenException) import logging import operator 
from functools import reduce
import tweepy
import json

from captcha.models import CaptchaStore
from captcha.helpers import captcha_image_url

log = logging.getLogger(__name__)


def create_response(response_data):
    """
    method used to create response data in given format
    """
    response = OrderedDict()
    response["header"] = {"status": "1"}
    response["body"] = response_data
    return response


def create_serializer_error_response(errors):
    """
    method is used to create error response for serializer errors
    """
    error_list = []
    for k, v in errors.items():
        if isinstance(v, dict):
            # nested serializer errors: unwrap the innermost message
            _, v = v.popitem()
        d = {}
        d["field"] = k
        d["field_error"] = v[0]
        error_list.append(d)
    return OrderedDict({"header": {"status": "0"}, "errors": {
        "errorList": error_list}})


class SignUpAPIView(APIView):
    """
    User registration endpoint.
    """
    permission_classes = (AllowAny,)

    def post(self, request, *args, **kwargs):
        user_serializer = UserSerializer(data=request.data)
        if user_serializer.is_valid():
            user_serializer.save()
            return Response(create_response({"Msg": "sign up successfully"}))
        else:
            return Response(
                create_serializer_error_response(user_serializer.errors),
                status=403)


class LoginFieldsRequired(APIException):
    """
    api exception for no user found
    """
    status_code = 401
    default_detail = ("username and password are required")
    default_code = "username_and_password"


class LoginAPIView(generics.GenericAPIView):
    """
    Email/password login.

    Optionally registers the caller's device (when device_id/device_name are
    posted) and returns the device's notification preferences along with the
    user data and auth token.
    """
    serializer_class = LoginUserSerializer
    permission_classes = (AllowAny,)

    def post(self, request, format=None):
        serializer = LoginUserSerializer(data=request.data)
        if not serializer.is_valid():
            res_data = create_serializer_error_response(serializer.errors)
            return Response(res_data, status=403)

        user = BaseUserProfile.objects.filter(
            email=request.data["email"]).first()
        # BUGFIX: .first() may return None; without this guard the code below
        # raised an unhandled AttributeError (HTTP 500) for unknown emails.
        # NOTE(review): LoginUserSerializer presumably validates credentials,
        # making this unreachable in practice — confirm against the serializer.
        if user is None:
            return Response(create_error_response(
                {"Msg": "Invalid credentials"}), status=403)
        device_name = request.data.get("device_name")
        device_id = request.data.get("device_id")
        if device_id and device_name:
            device, _ = Devices.objects.get_or_create(
                user=user, device_name=device_name, device_id=device_id)
            notification_obj, _ = Notification.objects.get_or_create(
                device=device)
            notification = NotificationSerializer(notification_obj)
        user_serializer = BaseUserProfileSerializer(user)
        token, _ = Token.objects.get_or_create(user=user)
        data = user_serializer.data
        data["token"] = token.key
        if device_id and device_name:
            data["breaking_news"] = notification.data['breaking_news']
            data["daily_edition"] = notification.data['daily_edition']
            data["personalized"] = notification.data['personalized']
        response_data = create_response({"user": data})
        return Response(response_data)


class LogoutAPIView(APIView):
    permission_classes = (IsAuthenticated,)

    def get(self, request, format=None):
        # Deleting the token invalidates the session for token auth clients.
        request.user.auth_token.delete()
        return Response(create_response({"Msg": "User has been logged out"}))


class UserHashTagAPIView(APIView):
    """
    Save new tags and remove older tags based on user selection
    """
    permission_classes = (IsAuthenticated,)
    parser_classes = (JSONParser,)

    def post(self, request, format=None):
        user = self.request.user
        hash_tags = request.data["tags"]
        user_tags = HashTag.objects.filter(name__in=hash_tags)
        if user_tags:
            # replace the user's whole tag selection atomically
            user.passion.clear()
            user.passion.add(*user_tags)
            return Response(create_response({"Msg": "Successfully saved tags"}))
        return Response(create_error_response({"Msg": "Invalid tags"}),
                        status=400)


class CategoryListAPIView(APIView):
    permission_classes = (AllowAny,)

    def get(self, request, format=None, *args, **kwargs):
        """
        List all news category
        """
        categories = CategorySerializer(Category.objects.all(), many=True)
        return Response(create_response({"categories": categories.data}))

    def post(self, request, format=None):
        """
        Save new category to database
        """
        if request.user.is_authenticated:
            serializer = CategorySerializer(data=request.data, many=True)
            if serializer.is_valid():
                serializer.save()
                return Response(create_response(serializer.data))
            return Response(create_error_response(serializer.errors),
                            status=400)
        raise Http404

    def put(self, request, format=None):
        """
        update category in database
        """
if request.user.is_authenticated: _id = request.data.get("id") category = Category.objects.get(id=_id) serializer = CategorySerializer(category, data=request.data) if serializer.is_valid(): serializer.save() return Response(create_response(serializer.data)) return Response(create_error_response(serializer.errors), status=400) raise Http404 class SourceListAPIView(APIView): permission_classes = (AllowAny,) def get(self, request, format=None, *args, **kwargs): """ List all the sources """ source = SourceSerializer(Source.objects.all(), many=True) return Response(create_response({"results": source.data})) class NoarticleFound(APIException): """ api exception for no user found """ status_code = 404 default_detail = ("Article does not exist") default_code = "no_article_found" class PostpageNumberPagination(CursorPagination): page_size = 10 page_size_query_param = 'page_size' ordering = '-created_at' class ArticleListAPIView(ListAPIView): serializer_class = ArticleSerializer permission_classes = (AllowAny,) pagination_class = PostpageNumberPagination filter_backends = (filters.OrderingFilter,) ordering = ('-published_on',) def get_queryset(self): q = self.request.GET.get("q", "") tag = self.request.GET.getlist("tag", "") category = self.request.GET.getlist("category", "") source = self.request.GET.getlist("source", "") queryset = Article.objects.all() if self.request.user.domain: queryset = queryset.filter(domain=self.request.user.domain) else: queryset = Article.objects.none() if source: queryset = queryset.filter(source__name__in=source) if category: queryset = queryset.filter(category__name__in=category) if tag: queryset = queryset.filter(hash_tags__name__in=tag) if q: q_list = q.split(" ") condition_1 = reduce(operator.or_, [Q(title__icontains=s) for s in q_list]) condition_2 = reduce(operator.or_, [Q(full_text__icontains=s) for s in q_list]) queryset = queryset.filter(condition_1 | condition_2) return queryset def list(self, request, *args, **kwargs): queryset = 
self.filter_queryset(self.get_queryset()) page = self.paginate_queryset(queryset) if page is not None: serializer = self.get_serializer(page, many=True) if serializer.data: paginated_response = self.get_paginated_response(serializer.data) return Response(create_response(paginated_response.data)) else: return Response(create_error_response({"Msg": "News Doesn't Exist"}), status=400) class ArticleDetailAPIView(APIView): permission_classes = (AllowAny,) def get(self, request, format=None, *args, **kwargs): slug = self.kwargs.get("slug", "") user = self.request.user article = Article.objects.filter(slug=slug).first() has_subscribed = False if not self.request.user.is_anonymous and \ Subscription.objects.filter( user=self.request.user).exlcude(subs_type='Basic').exists(): has_subscribed = True try: next_article = Article.objects.filter(id__gt=article.id).order_by("id")[0:1].get().slug except Exception as error: print(error) next_article = Article.objects.aggregate(Min("id"))['id__min'] try: prev_article = Article.objects.filter(id__gt=article.id).order_by("-id")[0:1].get().slug except Exception as error: print(error) prev_article = Article.objects.aggregate(Max("id"))['id__max'] if article: response_data = ArticleSerializer(article, context={ "hash_tags_list": True, 'has_subscribed': has_subscribed}).data if not user.is_anonymous: book_mark_article = BookmarkArticle.objects.filter( user=user, article=article).first() like_article = ArticleLike.objects.filter( user=user, article=article).first() if book_mark_article: response_data["isBookMark"] = True else: response_data["isBookMark"] = False if like_article: response_data["isLike"] = like_article.is_like else: response_data["isLike"] = 2 return Response(create_response({ "article": response_data, "next_article": next_article, "prev_article": prev_article})) raise NoarticleFound def post(self, request, *args, **kwargs): if request.user.is_authenticated: article_id = self.request.POST.get("article_id", "") is_like = 
self.request.POST.get("isLike", "") user = self.request.user article = Article.objects.filter(id=article_id).first() if article: if is_like and int(is_like) <= 2: article_like, created = ArticleLike.objects.get_or_create( user=user, article=article) article_like.is_like = is_like article_like.save() serializer = ArticleLikeSerializer(article_like) return Response(create_response({ "Msg": "Article like status changed", "article": serializer.data })) else: return Response(create_error_response({ "Msg": "Invalid Input" })) else: return Response(create_error_response({"Msg": "News doesn't exist"}), status=400) raise Http404 class ArticleBookMarkAPIView(APIView): permission_classes = (IsAuthenticated,) def post(self, request, *args, **kwargs): if request.data: article_id = request.data["article_id"] else: article_id = self.request.POST.get("article_id", "") user = self.request.user if article_id: article = Article.objects.filter(id=article_id).first() if article: bookmark_article, created = \ BookmarkArticle.objects.get_or_create(user=user, article=article) if not created: del_bookmark_article = BookmarkArticleSerializer(bookmark_article) del_bookmark = del_bookmark_article.data del_bookmark["status"] = 0 bookmark_article.delete() return Response(create_response({ "Msg": "Article removed from bookmark list", "bookmark_article": del_bookmark })) else: bookmark_article = BookmarkArticleSerializer(bookmark_article) return Response(create_response({ "Msg": "Article bookmarked successfully", "bookmark_article": bookmark_article.data })) raise NoarticleFound class ArticleRecommendationsAPIView(APIView): permission_classes = (AllowAny,) def format_response(self, response): results = [] if response['hits']['hits']: for result in response['hits']['hits']: results.append(result["_source"]) return results def get(self, request, *args, **kwargs): article_id = self.kwargs.get("article_id", "") if article_id: results = es.search(index='recommendation', body={"query": {"match": {"id": 
int(article_id)}}}) if results['hits']['hits']: recommendation = results['hits']['hits'][0]['_source']['recommendation'] search_results = es.search(index='article', body={ "query": {"terms": {"id": recommendation}}, "size": 25}) return Response(create_response({ "results": self.format_response(search_results) })) return Response(create_error_response({ "Msg": "Error generating recommendation" })) class ForgotPasswordAPIView(APIView): permission_classes = (AllowAny,) def genrate_password(self, password_length=10): """ Returns a random pasword of length password_length. """ random = str(uuid.uuid4()) random = random.upper() random = random.replace("-", "") return random[0:password_length] def send_mail_to_user(self, email, password, first_name="", last_name=""): username = first_name + " " + last_name email_subject = 'NewsPost: Forgot Password Request' email_body = """ <html> <head> </head> <body> <p> Hello """ + username + """,<br><br><b> """ + password + """</b> is your new password <br> <br> Thanks,<br> The NewsPost Team<br> </p> </body> </html>""" msg = EmailMultiAlternatives( email_subject, '', settings.EMAIL_FROM, [email]) ebody = email_body msg.attach_alternative(ebody, "text/html") msg.send(fail_silently=False) def post(self, request, *args, **kwargs): email = request.data["email"] if email: user = BaseUserProfile.objects.filter(email=email) if user: user = user.first() password = self.genrate_password() self.send_mail_to_user( email, password, user.first_name, user.last_name) user.set_password(password) user.save() return Response(create_response({ "Msg": "New password sent to your email" })) return Response(create_error_response({ "Msg": "Email Does Not Exist" })) class ChangePasswordAPIView(APIView): permission_classes = (IsAuthenticated,) def post(self, request, *args, **kwargs): if request.data: password = request.data["password"] old_password = request.data["old_password"] confirm_password = request.data["confirm_password"] else: password = 
self.request.POST.get("password", "") old_password = self.request.POST.get("old_password", "") confirm_password = self.request.POST.get("confirm_password", "") user = self.request.user if old_password: if not user.check_password(old_password): msg = "Old Password Does Not Match With User" return Response(create_error_response({ "Msg": msg, "field": "old_password" })) if confirm_password != password: msg = "Password and Confirm Password does not match" return Response(create_error_response({ "Msg": msg, "field": "confirm_password" })) if old_password == password: msg = "New password should not same as Old password" return Response(create_error_response({ "Msg": msg, "field": "password" })) if user and password: user.set_password(password) user.save() return Response(create_response({ "Msg": "Password changed successfully", "field": "confirm_password" })) else: return Response(create_error_response({ "Msg": "Password field is required", "field": "password" })) else: return Response(create_error_response({ "Msg": "Old Password field is required", "field": "old_password" })) class BookmarkArticleAPIView(APIView): """ This class is used to get user bookmark list """ permission_classes = (IsAuthenticated,) def get(self, request): user = self.request.user bookmark_list = BookmarkArticleSerializer(BookmarkArticle.objects.filter(user=user), many=True) return Response(create_response({"results": bookmark_list.data})) class ArticleLikeAPIView(APIView): """ This class is used to get user articles """ permission_classes = (IsAuthenticated,) def get(self, request): like_list = [0, 1] user = self.request.user article_list = ArticleLikeSerializer(ArticleLike.objects.filter(user=user, is_like__in=like_list), many=True) return Response(create_response({"results": article_list.data})) class HashTagAPIView(ListAPIView): serializer_class = HashTagSerializer permission_classes = (AllowAny,) def get_queryset(self): weekly = self.request.GET.get("weekly", "") monthly = 
self.request.GET.get("monthly", "") end = datetime.utcnow() pst = pytz.timezone('Asia/Kolkata') end = pst.localize(end) utc = pytz.UTC end = end.astimezone(utc) articles = Article.objects.all() queryset = HashTag.objects.all() if weekly: weekly = int(weekly) start = end - timedelta(days=7 * weekly) hash_tags = articles.filter(published_on__range=(start, end)).values( 'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10] for hashtag in hash_tags: hashtag['name'] = hashtag.pop('hash_tags__name') queryset = hash_tags if monthly: monthly = int(monthly) start = end - timedelta(days=30 * monthly) hash_tags = articles.filter(published_on__range=(start, end)).values( 'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10] for hashtag in hash_tags: hashtag['name'] = hashtag.pop('hash_tags__name') queryset = hash_tags if not weekly and not monthly: start = end - timedelta(days=1) hash_tags = articles.filter(published_on__range=(start, end)).values( 'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10] for hashtag in hash_tags: hashtag['name'] = hashtag.pop('hash_tags__name') queryset = hash_tags return queryset def list(self, request, *args, **kwargs): queryset = self.filter_queryset(self.get_queryset()) page = self.paginate_queryset(queryset) if page is not None: serializer = self.get_serializer(page, many=True) if serializer.data: paginated_response = self.get_paginated_response(serializer.data) return Response(create_response(paginated_response.data)) else: return Response(create_error_response({"Msg": "No trending tags"}), status=400) serializer = self.get_serializer(queryset, many=True) return Response(create_response(serializer.data)) class ArticleSearchAPI(APIView): """ this view is used for article search and filter """ permission_classes = (AllowAny,) def format_response(self, response): results = [] filters = {} if response.hits.hits: for result in response.hits.hits: source = result["_source"] 
if 'highlight' in result: if 'title' in result['highlight']: source['title'] = " ".join(result['highlight']['title']) if 'blurb' in result['highlight']: source['blurb'] = " ".join(result['highlight']['blurb']) results.append(source) if response.aggregations.category.buckets: filters["category"] = sorted( response.aggregations.category.buckets._l_, key=operator.itemgetter("key")) if response.aggregations.source.buckets: filters["source"] = sorted( response.aggregations.source.buckets._l_, key=operator.itemgetter("key")) if response.aggregations.hash_tags.buckets: filters["hash_tags"] = sorted( response.aggregations.hash_tags.buckets._l_, key=operator.itemgetter("key")) return results, filters def get(self, request): page = self.request.GET.get("page", "1") if page.isdigit(): page = int(page) else: page = 1 size = self.request.GET.get("rows", "20") if size.isdigit(): size = int(size) else: size = 20 query = self.request.GET.get("q", "") source = self.request.GET.getlist("source", []) category = self.request.GET.getlist("category", []) domain = self.request.GET.getlist("domain", []) tags = self.request.GET.getlist("tag", []) sort = self.request.GET.get("sort", "desc") if not domain: return Response(create_serializer_error_response({"domain": ["Domain id is required"]})) # mort like this for related queries mlt_fields = ["has_tags"] if source: mlt_fields = ["has_tags", "source", "domain"] mlt = Search(using=es, index="article").query("more_like_this", fields=mlt_fields, like=query, min_term_freq=1, max_query_terms=12).source(mlt_fields) mlt.execute() sr = Search(using=es, index="article") # highlight title and blurb containing query sr = sr.highlight("title", "blurb", fragment_size=20000) # generate elastic search query must_query = [{"wildcard": {"cover_image": "*"}}] should_query = [] if query: query = query.lower() must_query.append({"multi_match": {"query": query, "fields": ["title", "blurb"], 'type': 'phrase'}}) if tags: tags = [tag.lower().replace("-", " ") for 
tag in tags] for tag in tags: sq = {"match_phrase": {"hash_tags": tag}} should_query.append(sq) if must_query: sr = sr.query("bool", must=must_query) if should_query: if len(should_query) > 1: sr = sr.filter("bool", should=should_query) else: sr = sr.filter("bool", should=should_query[0]) if domain: sr = sr.filter("terms", domain=list(domain)) if category: cat_objs = Category.objects.filter(name__in=category) category = cat_objs.values_list("id", flat=True) cat_assn_objs = CategoryAssociation.objects.filter( parent_cat__in=cat_objs).values_list( "child_cat__id", flat=True) if cat_assn_objs: new_category = set(list(cat_assn_objs) + list(category)) sr = sr.filter("terms", category_id=list(new_category)) else: if category: sr = sr.filter("terms", category_id=list(category)) if source: source = [s.lower() for s in source] sr = sr.filter("terms", source__keyword=source) sr = sr.sort({"article_score": {"order": sort}}) sr = sr.sort({"published_on": {"order": sort}}) # pagination start = (page - 1) * size end = start + size sr = sr[start:end] # generate facets sr.aggs.bucket("category", "terms", field="category.keyword") sr.aggs.bucket("source", "terms", field="source.keyword") sr.aggs.bucket("hash_tags", "terms", field="hash_tags.keyword", size=50) # execute query response = sr.execute() results, filters = self.format_response(response) count = response["hits"]["total"] total_pages = math.ceil(count / size) url = request.build_absolute_uri() if end < count: next_page = page + 1 next_url = replace_query_param(url, "page", next_page) else: next_url = None if page != 1: previous_page = page - 1 previous_url = replace_query_param(url, "page", previous_page) else: previous_url = None data = { "results": results, "filters": filters, "count": count, "total_pages": total_pages, "current_page": page, "next": next_url, "previous": previous_url } return Response(create_response(data)) class MenuAPIView(APIView): """ This Api will return all the menus """ permission_classes = 
(AllowAny,) def get(self, request): domain_id = self.request.GET.get("domain") if not domain_id: return Response(create_error_response({"domain": ["Domain id is required"]})) domain = Domain.objects.filter(domain_id=domain_id).first() if not domain: return Response(create_error_response({"domain": ["Domain id is required"]})) menus = MenuSerializer(Menu.objects.filter(domain=domain), many=True) menus_list = menus.data new_menulist = [] for menu in menus_list: menu_dict = {} menu_dict['heading'] = menu new_menulist.append(menu_dict) return Response(create_response({'results': new_menulist})) class DevicesAPIView(APIView): """ this api will add device_id and device_name """ permission_classes = (IsAuthenticated,) def post(self, request, *args, **kwargs): user = self.request.user device_id = self.request.POST.get("device_id", "") device_name = self.request.POST.get("device_name", "") if not user.is_anonymous and device_id and device_name: user_device = Devices.objects.filter(user=user.pk) if user_device: user_device.update(device_id=device_id, device_name=device_name, user=user.id) return Response(create_response({"Msg": "Device successfully created"})) elif not user_device: get, created = Devices.objects.get_or_create(device_id=device_id, device_name=device_name, user=user.id) if created: return Response(create_response({"Msg": "Device successfully created"})) else: return Response(create_response({"Msg": "Device already exist"})) elif device_id and device_name: get, created = Devices.objects.get_or_create(device_id=device_id, device_name=device_name) if created: return Response(create_response({"Msg": "Device successfully created"})) else: return Response(create_response({"Msg": "Device already exist"})) else: return Response(create_error_response({"Msg": "device_id and device_name field are required"})) class NotificationAPIView(APIView): """ this api will add notification data """ permission_classes = (AllowAny,) def post(self, request): device_id = 
request.data["device_id"] device_name = request.data["device_name"] breaking_news = request.data["breaking_news"] daily_edition = request.data["daily_edition"] personalized = request.data["personalized"] device = Devices.objects.get(device_id=device_id, device_name=device_name) if breaking_news and daily_edition and personalized and device: notification = Notification.objects.filter(device=device) if notification: notification.update(breaking_news=breaking_news, daily_edition=daily_edition, personalized=personalized) return Response(create_response({"Msg": "Notification updated successfully"})) Notification.objects.create(breaking_news=breaking_news, daily_edition=daily_edition, personalized=personalized, device=device) return Response(create_response({"Msg": "Notification created successfully"})) else: return Response( create_error_response( {"Msg": "device_id, device_name, breaking_news, daily_edition and personalized are required"})) def get(self, request): device_id = request.GET.get("device_id") device_name = request.GET.get("device_name") device = Devices.objects.filter(device_id=device_id, device_name=device_name).first() if device: notification = NotificationSerializer(Notification.objects.fitler(device=device), many=True) return Response(create_response(notification.data)) return Response(create_error_response({"Msg": "Invalid device_id or device_name"})) class SocialLoginView(generics.GenericAPIView): """ this view is used for google social authentication and login """ permission_classes = (AllowAny,) serializer_class = BaseUserProfileSerializer def decode_google_token(self, token_id): """ this method is used to decode and verify google token """ request = grequests.Request() try: id_info = id_token.verify_oauth2_token(token_id, request) return id_info except Exception as e: log.debug("error in google token verification {0}".format(e)) return False def get_name_details(self, id_info): """ this methos is used to get first name and last name from id_info 
details """ first_name = last_name = "" if "name" in id_info: name = id_info.get("name") name_list = name.split(" ") first_name = name_list[0] if len(name_list) > 1: last_name = " ".join(name_list[1:]) if not first_name: if "given_name" in id_info: first_name = id_info.get("given_name") if not last_name: if "family_name" in id_info: last_name = id_info.get("family_name") return first_name, last_name def create_user_profile(self, first_name, last_name, username, email, image_url, sid, provider): """ this method is used to create base user profile object for given social account """ user = BaseUserProfile.objects.filter(email=email).first() created = "" if not user: user = BaseUserProfile.objects.create( first_name=first_name, last_name=last_name, email=email, username=username ) sa_obj, created = SocialAccount.objects.get_or_create( social_account_id=sid, image_url=image_url, user=user, provider=provider ) # create_profile_image.delay(sa_obj.id) return user, created def get_facebook_data(self, token_id): """ this method is used to get facebook user data from given access token """ graph = facebook.GraphAPI(access_token=token_id) try: res_data = graph.get_object( id='me?fields=email,id,first_name,last_name,name,picture.width(150).height(150)') return res_data except Exception as e: log.debug("error in facebook fetch data: {0}".format(e)) return False def get_facebook_name_details(self, profile_data): """ this method is used to get facebook first_name last_name from profile data """ name = first_name = last_name = "" if "first_name" in profile_data: first_name = profile_data.get("first_name") if "last_name" in profile_data: last_name = profile_data.get("last_name") if "name" in profile_data: name = profile_data.get("name") name_list = name.split(" ") if not first_name: first_name = name_list[0] if not last_name: last_name = " ".join(name[1:]) return first_name, last_name def get_user_serialize_data(self, email, device_id, device_name): """ this method will return 
customize user data """ user = BaseUserProfile.objects.filter(email=email).first() device = Devices.objects.filter(user=user.id) if device: device.update(device_name=device_name, device_id=device_id) else: device, created = Devices.objects.get_or_create(device_name=device_name, device_id=device_id) Devices.objects.filter(pk=device.pk).update(user=user) notification = NotificationSerializer(Notification.objects.get_or_create(device=device), many=True) token, _ = Token.objects.get_or_create(user=user) data = BaseUserProfileSerializer(user).data data["token"] = token.key data["breaking_news"] = notification.data[0]['breaking_news'] data["daily_edition"] = notification.data[0]['daily_edition'] data["personalized"] = notification.data[0]['personalized'] return data def post(self, request, *args, **kwargs): """ this is post method for collection google social auth data and generate authentication api token for user """ token_id = request.data.get("token_id") provider = request.data.get("provider") device_id = request.data.get("device_id") device_name = request.data.get("device_name") if not token_id: raise TokenIDMissing() if not provider: raise ProviderMissing() if not device_id: return Response(create_error_response({"Msg": "device_id is missing or Invalid device_id"})) if not device_name: return Response(create_error_response({"Msg": "device_name is missing or Invalid device_name"})) if provider not in SOCIAL_AUTH_PROVIDERS: raise ProviderMissing() if provider == "google": id_info = self.decode_google_token(token_id) if not id_info: raise SocialAuthTokenException() first_name, last_name = self.get_name_details(id_info) email = id_info.get("email", "") if not email: raise SocialAuthTokenException() username = email.split("@")[0] google_id = id_info.get("sub", "") image_url = id_info.get("picture", "") user, created = self.create_user_profile( first_name, last_name, username, email, image_url, google_id, provider) user_data = self.get_user_serialize_data(email, 
device_id, device_name) return Response(create_response({"user": user_data})) if provider == "facebook": profile_data = self.get_facebook_data(token_id) if not profile_data: raise SocialAuthTokenException() first_name, last_name = self.get_facebook_name_details( profile_data) email = profile_data.get("email") if not email: raise SocialAuthTokenException() username = username = email.split("@")[0] facebook_id = profile_data.get("id", "") image_url = "" if "picture" in profile_data: if "data" in profile_data["picture"]: image_url = profile_data["picture"]["data"]["url"] user, created = self.create_user_profile( first_name, last_name, username, email, image_url, facebook_id, provider) user_data = self.get_user_serialize_data(email, device_id, device_name) return Response(create_response({"user": user_data})) raise ProviderMissing() class TrendingArticleAPIView(APIView): permission_classes = (AllowAny,) def get(self, request, format=None, *args, **kwargs): """ List all the trending articles """ domain_id = self.request.GET.get("domain") if not domain_id: return Response(create_error_response({"domain": ["Domain id is required"]})) domain = Domain.objects.filter(domain_id=domain_id).first() if not domain: return Response(create_error_response({"domain": ["Invalid domain name"]})) source = TrendingArticleSerializer(TrendingArticle.objects.filter(domain=domain), many=True) return Response(create_response({"results": source.data})) class SocailMediaPublishing(): """ this class is to update news arrticles on social media """ def twitter(self, data): """ this function will tweet article title and its url in twitter """ try: auth = tweepy.OAuthHandler(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET) auth.set_access_token(settings.TWITTER_ACCESS_TOKEN, settings.TWITTER_ACCESS_TOKEN_SECRET) api = tweepy.API(auth) api.update_status(data["title"] + "\n" + data["url"]) except Exception as e: print("Error in twitter post: ", e) class ArticleCreateUpdateView(APIView, 
SocailMediaPublishing): """ Article create update view """ permission_classes = (IsAuthenticated,) def get_tags(self, tags): """ this method will return tag name from tags objects """ tag_list = [] for tag in tags: tag_list.append(tag["name"]) return tag_list def publish(self, obj): serializer = ArticleSerializer(obj) json_data = serializer.data if json_data["hash_tags"]: tag_list = self.get_tags(json_data["hash_tags"]) json_data["hash_tags"] = tag_list ingest_to_elastic([json_data], "article", "article", "id") tweet_data = { "title": serializer.instance.title, "url": serializer.instance.source_url, } self.twitter(tweet_data) def post(self, request): publish = request.data.get("publish") # origin is used to join with cover image # to generate proper image url origin = request.META.get("HTTP_ORIGIN") cover_image_id = request.data.get("cover_image_id") if cover_image_id: DraftMedia.objects.filter(id=cover_image_id).delete() if not request.data.get("cover_image"): request.data["cover_image"] = "/".join( [origin, request.user.domain.default_image.url]) context = {"publish": publish, "user": request.user} serializer = ArticleCreateUpdateSerializer( data=request.data, context=context) if serializer.is_valid(): serializer.save() if publish: self.publish(serializer.instance) return Response(create_response(serializer.data)) return Response(create_error_response(serializer.errors), status=400) def put(self, request): _id = request.data.get("id") publish = request.data.get("publish") # origin is used to join with cover image # to generate proper image url origin = request.META.get("HTTP_ORIGIN") cover_image_id = request.data.get("cover_image_id") if cover_image_id: DraftMedia.objects.filter(id=cover_image_id).delete() if not request.data.get("cover_image"): request.data["cover_image"] = "/".join( [origin, request.user.domain.default_image.url]) context = {"publish": publish, "user": request.user} article = Article.objects.get(id=_id) serializer = 
ArticleCreateUpdateSerializer( article, data=request.data, context=context) if serializer.is_valid(): serializer.save() if publish: self.publish(serializer.instance) return Response(create_response(serializer.data)) return Response(create_error_response(serializer.errors), status=400) class ChangeArticleStatusView(APIView, SocailMediaPublishing): """ this view is used to update status of given article activate or deactivate """ permission_classes = (IsAuthenticated,) def get_tags(self, tags): """ this method will return tag name from tags objects """ tag_list = [] for tag in tags: tag_list.append(tag["name"]) return tag_list def publish(self, obj): serializer = ArticleSerializer(obj) json_data = serializer.data if obj.active: if json_data["hash_tags"]: tag_list = self.get_tags(json_data["hash_tags"]) json_data["hash_tags"] = tag_list ingest_to_elastic([json_data], "article", "article", "id") tweet_data = { "title": serializer.instance.title, "url": serializer.instance.source_url, } self.twitter(tweet_data) else: delete_from_elastic([json_data], "article", "article", "id") def post(self, request): _id = request.data.get("id") article = Article.objects.filter(id=_id).first() if not article: return Response(create_error_response({"error": "Article does not exists"}), status=400) article.active = request.data.get("activate") article.save() self.publish(article) return Response(create_response({ "id": article.id, "active": article.active})) class CategoryBulkUpdate(APIView): """ update whole bunch of articles in one go """ permission_classes = (IsAuthenticated,) def get_tags(self, tags): """ this method will return tag name from tags objects """ tag_list = [] for tag in tags: tag_list.append(tag["name"]) return tag_list def post(self, request): category_id = request.data['categories'] category = Category.objects.get(id=category_id) for article_id in request.data['articles']: current = Article.objects.get(id=article_id) current.category = category current.save() 
serializer = ArticleSerializer(current) json_data = serializer.data delete_from_elastic([json_data], "article", "article", "id") if json_data["hash_tags"]: tag_list = self.get_tags(json_data["hash_tags"]) json_data["hash_tags"] = tag_list ingest_to_elastic([json_data], "article", "article", "id") return Response({"ok": "cool"}) class GetDailyDigestView(ListAPIView): serializer_class = ArticleSerializer permission_classes = (AllowAny,) def format_response(self, response): results = [] if response.hits.hits: for result in response.hits.hits: results.append(result["_source"]) return results def get_queryset(self): device_id = self.request.GET.get("device_id", "") queryset = Devices.objects.filter(device_id=device_id) dd = DailyDigest.objects.filter(device__in=queryset) if not queryset.exists() or not dd.exists(): return [] return dd.first().articles.all().order_by("-published_on") def list(self, request, *args, **kwargs): queryset = self.get_queryset() if not queryset: sr = Search(using=es, index="article") sort = "desc" sr = sr.sort({"article_score": {"order": sort}}) sr = sr.sort({"published_on": {"order": sort}}) sr = sr[0:20] response = sr.execute() results = self.format_response(response) return Response(create_response({"results": results})) serializer = self.get_serializer(queryset, many=True) if serializer.data: return Response(create_response(serializer.data)) else: return Response(create_error_response({"Msg": "Daily Digest Doesn't Exist"}), status=400) class DraftMediaUploadViewSet(viewsets.ViewSet): """ this view is used to upload article images """ permission_classes = (IsAuthenticated,) def create(self, request): image_file = request.data.get("image") if not image_file: return Response(create_error_response({"error": "Image file is required."})) draft_image = DraftMedia.objects.create(image=image_file) serializer = DraftMediaSerializer(draft_image) return Response(create_response(serializer.data)) def update(self, request, pk): image_file = 
request.data.get("image") if not image_file: return Response(create_error_response({"error": "Image file is required."})) draft_image = DraftMedia.objects.get(id=pk) if not draft_image: return Http404 draft_image.image = image_file draft_image.save() serializer = DraftMediaSerializer(draft_image) return Response(create_response(serializer.data)) def destroy(self, request, pk): draft_image = DraftMedia.objects.get(id=pk) if not draft_image: return Http404 draft_image.delete() return Response(create_response({"Msg": "Image deleted successfully"})) class CommentViewSet(viewsets.ViewSet): serializer_class = CommentSerializer permission_classes = (IsAuthenticated,) pagination_class = PostpageNumberPagination ordering = "-created_at" def get_permissions(self): """ Instantiates and returns the list of permissions that this view requires. """ if self.action == 'list': self.permission_classes = [AllowAny] else: self.permission_classes = [IsAuthenticated] return [permission() for permission in self.permission_classes] def create(self, request): captcha_response_key = 0 captcha_key = request.data.get("captcha_key") captcha_value = request.data.get("captcha_value") captcha = CaptchaStore.objects.filter(hashkey=captcha_key).first() if not captcha: return Response(create_error_response({"error": "Invalid Captcha"})) if captcha.response != captcha_value.lower(): return Response(create_error_response({"error": "Invalid Captcha"})) data = request.data.copy() data["user"] = request.user.id serializer = CommentSerializer(data=data) if serializer.is_valid(): serializer.save() return Response(create_response({"result": serializer.data})) return Response(create_error_response({"error": "Enter Valid data"})) def list(self, request): article_id = request.GET.get("article_id", "") if not article_id: return Response( create_error_response( {"error": "Article ID has not been entered by the user"} ) ) article_obj = Article.objects.filter(id=article_id).first() if not article_obj: return 
Response(create_error_response({"error": "Article does not exist"}) ) comment_list = Comment.objects.filter(article=article_obj, reply=None) serializer = CommentSerializer(comment_list, many=True) return Response( create_response( {"results": serializer.data, "total_article_likes": ArticleLike.objects.filter( article=article_obj).count()})) def destroy(self, request, pk): comment_obj = Comment.objects.filter(id=pk) if not comment_obj: return Response(create_error_response({"error": "Comment does not exist"})) comment_obj.delete() return Response(create_response({"Msg": "Comment deleted successfully"})) class LikeAPIView(APIView): permission_classes = (IsAuthenticated,) pagination_class = PostpageNumberPagination ordering = "-created_at" def post(self, request): post_data = request.data.copy() post_data["user"] = request.user.id serializer = ArticleLikeSerializer(data=post_data) if serializer.is_valid(): serializer.save() if serializer.data.get("id"): return Response(create_response({"Msg": "Liked"})) return Response(create_response({"Msg": "Removed Like"})) return Response(create_error_response({"error": "Invalid Data Entered"})) class CaptchaCommentApiView(APIView): permission_classes = (IsAuthenticated,) def get(self, request): captcha_len = len(CaptchaStore.objects.all()) if captcha_len > 500: captcha = CaptchaStore.objects.order_by('?')[:1] to_json_response = dict() to_json_response['status'] = 1 to_json_response['new_captch_key'] = captcha[0].hashkey to_json_response['new_captch_image'] = captcha_image_url(to_json_response['new_captch_key']) return Response(create_response({"result": to_json_response})) else: to_json_response = dict() to_json_response['status'] = 1 to_json_response['new_captch_key'] = CaptchaStore.generate_key() to_json_response['new_captch_image'] = captcha_image_url(to_json_response['new_captch_key']) return Response(create_response({"result": to_json_response})) class AutoCompleteAPIView(generics.GenericAPIView): permission_classes = 
(AllowAny,) def format_response(self, response): results = [] if response['hits']['hits']: for result in response['hits']['hits']: results.append(result["_source"]) return results def get(self, request): result_list = [] if request.data: query = request.data["q"] else: query = request.GET.get("q", "") if query: results = es.search( index="auto_suggestions", body={ "suggest": { "results": { "text": query, "completion": {"field": "name_suggest"}, } } }, ) results = results['suggest']['results'][0]['options'] if results: for result in results: result_list.append( { "value": result["_source"]["name_suggest"], "key": result["_source"]["desc"], } ) return Response(create_response({"result": result_list})) return Response(create_response({"result": []})) class SubsAPIView(ListAPIView): serializer_class = SubsMediaSerializer permission_classes = (AllowAny,) pagination_class = PostpageNumberPagination def get(self, request): q = self.request.GET.get("q", None) subs = Subscription.objects.all() if q: subs = subs.filter(user__email__icontains=q) source = SubsMediaSerializer(subs, many=True) return Response(create_response({"results": source.data})) class UpdateSubsAPIView(APIView): serializer_class = SubsMediaSerializer permission_classes = (AllowAny,) def get(self, request, pk): source = SubsMediaSerializer(Subscription.objects.get(id=pk)) return Response(create_response({"results": source.data})) def post(self, request, *args, **kwargs): subs_id = self.request.POST.get('id') subs = Subscription.objects.filter(id=subs_id) if subs.exists(): subs = subs.first() subs.subs_type = self.request.POST.get('subs_type') auto_renew = self.request.POST.get('auto_renew') if auto_renew == 'No': subs.auto_renew = False else: subs.auto_renew = True subs.save() return Response(create_response({"results": "success"})) return Response(create_response({"results": "error"})) class UserProfileAPIView(APIView): permission_classes = (IsAuthenticated, ) def get(self, request, *args, **kwargs): user 
= BaseUserProfile.objects.filter(id=self.request.user.id).first() serializer = UserProfileSerializer(user) data = serializer.data response_data = create_response({"user": data}) return Response(response_data) def put(self, request, format=None): if request.user.is_authenticated: if request.data: _id = request.data["id"] else: _id = self.request.POST.get('id') user = BaseUserProfile.objects.get(id=_id) serializer = UserProfileSerializer(user, data=request.data) if serializer.is_valid(): serializer.save() return Response(create_response({"result":serializer.data, "Msg":"Profile updated successfully."})) return Response(create_error_response(serializer.errors), status=400) raise Http404 class AccessSession(APIView): permission_classes = (AllowAny,) def get(self, request): print(request.META.items()) request.session["ip"] = request.META.get('REMOTE_ADDR') return Response(create_response({"results": request.session._session_key})) class RSSAPIView(APIView): permission_classes = (AllowAny,) def get(self, request): data = {} domain = request.GET.get("domain") if domain: domain_obj = Domain.objects.filter(domain_id=domain).first() if domain_obj: menus = Menu.objects.filter(domain=domain_obj) for menu in menus: all_categories = menu.submenu.all() for category in all_categories: data[category.name.name] = "/article/rss/?domain=" + domain + "&category=" + category.name.name return Response(create_response({"results": data})) return Response(create_error_response({"error": "Domain do not exist."})) return Response(create_error_response({"error": "Domain is required"}))
37.736129
121
0.601306
from __future__ import unicode_literals

import json
import logging
import math
import operator
import uuid
from collections import OrderedDict
from datetime import datetime, timedelta
from functools import reduce

import facebook
import pytz
import tweepy
from captcha.helpers import captcha_image_url
from captcha.models import CaptchaStore
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.db.models import Count, Max, Min, Q
from django.http import Http404
from elasticsearch_dsl import Search
from google.auth.transport import requests as grequests
from google.oauth2 import id_token
from rest_framework import filters, generics, viewsets
from rest_framework.authtoken.models import Token
from rest_framework.exceptions import APIException
from rest_framework.generics import ListAPIView
from rest_framework.pagination import CursorPagination
from rest_framework.parsers import JSONParser
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.utils.urls import replace_query_param
from rest_framework.views import APIView

# BUG FIX: dropped the duplicate `Category` entry from this import list
from core.models import (Category, Article, Source, BaseUserProfile,
                         BookmarkArticle, ArticleLike, HashTag, Menu,
                         Notification, Devices, SocialAccount,
                         CategoryAssociation, TrendingArticle, Domain,
                         DailyDigest, DraftMedia, Comment, Subscription)
from core.utils import es, ingest_to_elastic, delete_from_elastic
from newscout_web.constants import SOCIAL_AUTH_PROVIDERS

from .exception_handler import (create_error_response, TokenIDMissing,
                                ProviderMissing, SocialAuthTokenException)
from .serializers import (CategorySerializer, ArticleSerializer,
                          UserSerializer, SourceSerializer,
                          LoginUserSerializer, BaseUserProfileSerializer,
                          BookmarkArticleSerializer, ArticleLikeSerializer,
                          HashTagSerializer, MenuSerializer,
                          NotificationSerializer, TrendingArticleSerializer,
                          ArticleCreateUpdateSerializer, DraftMediaSerializer,
                          CommentSerializer, CommentListSerializer,
                          SubsMediaSerializer, UserProfileSerializer)

log = logging.getLogger(__name__)


def create_response(response_data):
    """Wrap payload in the standard success envelope.

    Returns an OrderedDict with a ``header.status == "1"`` marker and
    the payload under ``body``.
    """
    response = OrderedDict()
    response["header"] = {"status": "1"}
    response["body"] = response_data
    return response


def create_serializer_error_response(errors):
    """Flatten DRF serializer errors into the standard error envelope.

    Each entry becomes ``{"field": name, "field_error": first_message}``
    under ``errors.errorList`` with ``header.status == "0"``.
    """
    error_list = []
    for k, v in errors.items():
        if isinstance(v, dict):
            # nested serializer errors: take an arbitrary inner field
            _, v = v.popitem()
        d = {}
        d["field"] = k
        d["field_error"] = v[0]
        error_list.append(d)
    return OrderedDict({"header": {"status": "0"}, "errors": {
        "errorList": error_list}})


class SignUpAPIView(APIView):
    """Open endpoint to register a new user account."""
    permission_classes = (AllowAny,)

    def post(self, request, *args, **kwargs):
        user_serializer = UserSerializer(data=request.data)
        if user_serializer.is_valid():
            user_serializer.save()
            return Response(create_response({"Msg": "sign up successfully"}))
        return Response(
            create_serializer_error_response(user_serializer.errors),
            status=403)


class LoginFieldsRequired(APIException):
    """Raised when username/password are missing from a login request."""
    status_code = 401
    default_detail = ("username and password are required")
    default_code = "username_and_password"


class LoginAPIView(generics.GenericAPIView):
    """Email/password login; returns the user plus an auth token.

    When ``device_id``/``device_name`` are supplied the device is
    registered and its notification preferences are echoed back.
    """
    serializer_class = LoginUserSerializer
    permission_classes = (AllowAny,)

    def post(self, request, format=None):
        serializer = LoginUserSerializer(data=request.data)
        if not serializer.is_valid():
            res_data = create_serializer_error_response(serializer.errors)
            return Response(res_data, status=403)

        user = BaseUserProfile.objects.filter(
            email=request.data["email"]).first()
        device_name = request.data.get("device_name")
        device_id = request.data.get("device_id")
        if device_id and device_name:
            device, _ = Devices.objects.get_or_create(
                user=user, device_name=device_name, device_id=device_id)
            notification_obj, _ = Notification.objects.get_or_create(
                device=device)
            notification = NotificationSerializer(notification_obj)

        user_serializer = BaseUserProfileSerializer(user)
        token, _ = Token.objects.get_or_create(user=user)
        data = user_serializer.data
        data["token"] = token.key
        if device_id and device_name:
            data["breaking_news"] = notification.data['breaking_news']
            data["daily_edition"] = notification.data['daily_edition']
            data["personalized"] = notification.data['personalized']
        return Response(create_response({"user": data}))


class LogoutAPIView(APIView):
    """Invalidate the caller's auth token."""
    permission_classes = (IsAuthenticated,)

    def get(self, request, format=None):
        request.user.auth_token.delete()
        return Response(create_response({"Msg": "User has been logged out"}))


class UserHashTagAPIView(APIView):
    """Replace the authenticated user's followed hash tags."""
    permission_classes = (IsAuthenticated,)
    parser_classes = (JSONParser,)

    def post(self, request, format=None):
        user = self.request.user
        # BUG FIX: a payload without "tags" raised KeyError (HTTP 500);
        # it now falls through to the "Invalid tags" error response
        hash_tags = request.data.get("tags", [])
        user_tags = HashTag.objects.filter(name__in=hash_tags)
        if user_tags:
            user.passion.clear()
            user.passion.add(*user_tags)
            return Response(
                create_response({"Msg": "Successfully saved tags"}))
        return Response(
            create_error_response({"Msg": "Invalid tags"}), status=400)


class CategoryListAPIView(APIView):
    """List categories (open); create/update them (authenticated)."""
    permission_classes = (AllowAny,)

    def get(self, request, format=None, *args, **kwargs):
        categories = CategorySerializer(Category.objects.all(), many=True)
        return Response(create_response({"categories": categories.data}))

    def post(self, request, format=None):
        if request.user.is_authenticated:
            serializer = CategorySerializer(data=request.data, many=True)
            if serializer.is_valid():
                serializer.save()
                return Response(create_response(serializer.data))
            return Response(
                create_error_response(serializer.errors), status=400)
        raise Http404

    def put(self, request, format=None):
        if request.user.is_authenticated:
            _id = request.data.get("id")
            category = Category.objects.get(id=_id)
            serializer = CategorySerializer(category, data=request.data)
            if serializer.is_valid():
                serializer.save()
                return Response(create_response(serializer.data))
            return Response(
                create_error_response(serializer.errors), status=400)
        raise Http404


class SourceListAPIView(APIView):
    permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs): source = SourceSerializer(Source.objects.all(), many=True) return Response(create_response({"results": source.data})) class NoarticleFound(APIException): status_code = 404 default_detail = ("Article does not exist") default_code = "no_article_found" class PostpageNumberPagination(CursorPagination): page_size = 10 page_size_query_param = 'page_size' ordering = '-created_at' class ArticleListAPIView(ListAPIView): serializer_class = ArticleSerializer permission_classes = (AllowAny,) pagination_class = PostpageNumberPagination filter_backends = (filters.OrderingFilter,) ordering = ('-published_on',) def get_queryset(self): q = self.request.GET.get("q", "") tag = self.request.GET.getlist("tag", "") category = self.request.GET.getlist("category", "") source = self.request.GET.getlist("source", "") queryset = Article.objects.all() if self.request.user.domain: queryset = queryset.filter(domain=self.request.user.domain) else: queryset = Article.objects.none() if source: queryset = queryset.filter(source__name__in=source) if category: queryset = queryset.filter(category__name__in=category) if tag: queryset = queryset.filter(hash_tags__name__in=tag) if q: q_list = q.split(" ") condition_1 = reduce(operator.or_, [Q(title__icontains=s) for s in q_list]) condition_2 = reduce(operator.or_, [Q(full_text__icontains=s) for s in q_list]) queryset = queryset.filter(condition_1 | condition_2) return queryset def list(self, request, *args, **kwargs): queryset = self.filter_queryset(self.get_queryset()) page = self.paginate_queryset(queryset) if page is not None: serializer = self.get_serializer(page, many=True) if serializer.data: paginated_response = self.get_paginated_response(serializer.data) return Response(create_response(paginated_response.data)) else: return Response(create_error_response({"Msg": "News Doesn't Exist"}), status=400) class ArticleDetailAPIView(APIView): permission_classes = (AllowAny,) def get(self, request, 
format=None, *args, **kwargs): slug = self.kwargs.get("slug", "") user = self.request.user article = Article.objects.filter(slug=slug).first() has_subscribed = False if not self.request.user.is_anonymous and \ Subscription.objects.filter( user=self.request.user).exlcude(subs_type='Basic').exists(): has_subscribed = True try: next_article = Article.objects.filter(id__gt=article.id).order_by("id")[0:1].get().slug except Exception as error: print(error) next_article = Article.objects.aggregate(Min("id"))['id__min'] try: prev_article = Article.objects.filter(id__gt=article.id).order_by("-id")[0:1].get().slug except Exception as error: print(error) prev_article = Article.objects.aggregate(Max("id"))['id__max'] if article: response_data = ArticleSerializer(article, context={ "hash_tags_list": True, 'has_subscribed': has_subscribed}).data if not user.is_anonymous: book_mark_article = BookmarkArticle.objects.filter( user=user, article=article).first() like_article = ArticleLike.objects.filter( user=user, article=article).first() if book_mark_article: response_data["isBookMark"] = True else: response_data["isBookMark"] = False if like_article: response_data["isLike"] = like_article.is_like else: response_data["isLike"] = 2 return Response(create_response({ "article": response_data, "next_article": next_article, "prev_article": prev_article})) raise NoarticleFound def post(self, request, *args, **kwargs): if request.user.is_authenticated: article_id = self.request.POST.get("article_id", "") is_like = self.request.POST.get("isLike", "") user = self.request.user article = Article.objects.filter(id=article_id).first() if article: if is_like and int(is_like) <= 2: article_like, created = ArticleLike.objects.get_or_create( user=user, article=article) article_like.is_like = is_like article_like.save() serializer = ArticleLikeSerializer(article_like) return Response(create_response({ "Msg": "Article like status changed", "article": serializer.data })) else: return 
Response(create_error_response({ "Msg": "Invalid Input" })) else: return Response(create_error_response({"Msg": "News doesn't exist"}), status=400) raise Http404 class ArticleBookMarkAPIView(APIView): permission_classes = (IsAuthenticated,) def post(self, request, *args, **kwargs): if request.data: article_id = request.data["article_id"] else: article_id = self.request.POST.get("article_id", "") user = self.request.user if article_id: article = Article.objects.filter(id=article_id).first() if article: bookmark_article, created = \ BookmarkArticle.objects.get_or_create(user=user, article=article) if not created: del_bookmark_article = BookmarkArticleSerializer(bookmark_article) del_bookmark = del_bookmark_article.data del_bookmark["status"] = 0 bookmark_article.delete() return Response(create_response({ "Msg": "Article removed from bookmark list", "bookmark_article": del_bookmark })) else: bookmark_article = BookmarkArticleSerializer(bookmark_article) return Response(create_response({ "Msg": "Article bookmarked successfully", "bookmark_article": bookmark_article.data })) raise NoarticleFound class ArticleRecommendationsAPIView(APIView): permission_classes = (AllowAny,) def format_response(self, response): results = [] if response['hits']['hits']: for result in response['hits']['hits']: results.append(result["_source"]) return results def get(self, request, *args, **kwargs): article_id = self.kwargs.get("article_id", "") if article_id: results = es.search(index='recommendation', body={"query": {"match": {"id": int(article_id)}}}) if results['hits']['hits']: recommendation = results['hits']['hits'][0]['_source']['recommendation'] search_results = es.search(index='article', body={ "query": {"terms": {"id": recommendation}}, "size": 25}) return Response(create_response({ "results": self.format_response(search_results) })) return Response(create_error_response({ "Msg": "Error generating recommendation" })) class ForgotPasswordAPIView(APIView): permission_classes = 
(AllowAny,) def genrate_password(self, password_length=10): random = str(uuid.uuid4()) random = random.upper() random = random.replace("-", "") return random[0:password_length] def send_mail_to_user(self, email, password, first_name="", last_name=""): username = first_name + " " + last_name email_subject = 'NewsPost: Forgot Password Request' email_body = """ <html> <head> </head> <body> <p> Hello """ + username + """,<br><br><b> """ + password + """</b> is your new password <br> <br> Thanks,<br> The NewsPost Team<br> </p> </body> </html>""" msg = EmailMultiAlternatives( email_subject, '', settings.EMAIL_FROM, [email]) ebody = email_body msg.attach_alternative(ebody, "text/html") msg.send(fail_silently=False) def post(self, request, *args, **kwargs): email = request.data["email"] if email: user = BaseUserProfile.objects.filter(email=email) if user: user = user.first() password = self.genrate_password() self.send_mail_to_user( email, password, user.first_name, user.last_name) user.set_password(password) user.save() return Response(create_response({ "Msg": "New password sent to your email" })) return Response(create_error_response({ "Msg": "Email Does Not Exist" })) class ChangePasswordAPIView(APIView): permission_classes = (IsAuthenticated,) def post(self, request, *args, **kwargs): if request.data: password = request.data["password"] old_password = request.data["old_password"] confirm_password = request.data["confirm_password"] else: password = self.request.POST.get("password", "") old_password = self.request.POST.get("old_password", "") confirm_password = self.request.POST.get("confirm_password", "") user = self.request.user if old_password: if not user.check_password(old_password): msg = "Old Password Does Not Match With User" return Response(create_error_response({ "Msg": msg, "field": "old_password" })) if confirm_password != password: msg = "Password and Confirm Password does not match" return Response(create_error_response({ "Msg": msg, "field": 
"confirm_password" })) if old_password == password: msg = "New password should not same as Old password" return Response(create_error_response({ "Msg": msg, "field": "password" })) if user and password: user.set_password(password) user.save() return Response(create_response({ "Msg": "Password changed successfully", "field": "confirm_password" })) else: return Response(create_error_response({ "Msg": "Password field is required", "field": "password" })) else: return Response(create_error_response({ "Msg": "Old Password field is required", "field": "old_password" })) class BookmarkArticleAPIView(APIView): permission_classes = (IsAuthenticated,) def get(self, request): user = self.request.user bookmark_list = BookmarkArticleSerializer(BookmarkArticle.objects.filter(user=user), many=True) return Response(create_response({"results": bookmark_list.data})) class ArticleLikeAPIView(APIView): permission_classes = (IsAuthenticated,) def get(self, request): like_list = [0, 1] user = self.request.user article_list = ArticleLikeSerializer(ArticleLike.objects.filter(user=user, is_like__in=like_list), many=True) return Response(create_response({"results": article_list.data})) class HashTagAPIView(ListAPIView): serializer_class = HashTagSerializer permission_classes = (AllowAny,) def get_queryset(self): weekly = self.request.GET.get("weekly", "") monthly = self.request.GET.get("monthly", "") end = datetime.utcnow() pst = pytz.timezone('Asia/Kolkata') end = pst.localize(end) utc = pytz.UTC end = end.astimezone(utc) articles = Article.objects.all() queryset = HashTag.objects.all() if weekly: weekly = int(weekly) start = end - timedelta(days=7 * weekly) hash_tags = articles.filter(published_on__range=(start, end)).values( 'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10] for hashtag in hash_tags: hashtag['name'] = hashtag.pop('hash_tags__name') queryset = hash_tags if monthly: monthly = int(monthly) start = end - timedelta(days=30 * monthly) hash_tags = 
articles.filter(published_on__range=(start, end)).values( 'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10] for hashtag in hash_tags: hashtag['name'] = hashtag.pop('hash_tags__name') queryset = hash_tags if not weekly and not monthly: start = end - timedelta(days=1) hash_tags = articles.filter(published_on__range=(start, end)).values( 'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10] for hashtag in hash_tags: hashtag['name'] = hashtag.pop('hash_tags__name') queryset = hash_tags return queryset def list(self, request, *args, **kwargs): queryset = self.filter_queryset(self.get_queryset()) page = self.paginate_queryset(queryset) if page is not None: serializer = self.get_serializer(page, many=True) if serializer.data: paginated_response = self.get_paginated_response(serializer.data) return Response(create_response(paginated_response.data)) else: return Response(create_error_response({"Msg": "No trending tags"}), status=400) serializer = self.get_serializer(queryset, many=True) return Response(create_response(serializer.data)) class ArticleSearchAPI(APIView): permission_classes = (AllowAny,) def format_response(self, response): results = [] filters = {} if response.hits.hits: for result in response.hits.hits: source = result["_source"] if 'highlight' in result: if 'title' in result['highlight']: source['title'] = " ".join(result['highlight']['title']) if 'blurb' in result['highlight']: source['blurb'] = " ".join(result['highlight']['blurb']) results.append(source) if response.aggregations.category.buckets: filters["category"] = sorted( response.aggregations.category.buckets._l_, key=operator.itemgetter("key")) if response.aggregations.source.buckets: filters["source"] = sorted( response.aggregations.source.buckets._l_, key=operator.itemgetter("key")) if response.aggregations.hash_tags.buckets: filters["hash_tags"] = sorted( response.aggregations.hash_tags.buckets._l_, key=operator.itemgetter("key")) return 
results, filters def get(self, request): page = self.request.GET.get("page", "1") if page.isdigit(): page = int(page) else: page = 1 size = self.request.GET.get("rows", "20") if size.isdigit(): size = int(size) else: size = 20 query = self.request.GET.get("q", "") source = self.request.GET.getlist("source", []) category = self.request.GET.getlist("category", []) domain = self.request.GET.getlist("domain", []) tags = self.request.GET.getlist("tag", []) sort = self.request.GET.get("sort", "desc") if not domain: return Response(create_serializer_error_response({"domain": ["Domain id is required"]})) mlt_fields = ["has_tags"] if source: mlt_fields = ["has_tags", "source", "domain"] mlt = Search(using=es, index="article").query("more_like_this", fields=mlt_fields, like=query, min_term_freq=1, max_query_terms=12).source(mlt_fields) mlt.execute() sr = Search(using=es, index="article") sr = sr.highlight("title", "blurb", fragment_size=20000) must_query = [{"wildcard": {"cover_image": "*"}}] should_query = [] if query: query = query.lower() must_query.append({"multi_match": {"query": query, "fields": ["title", "blurb"], 'type': 'phrase'}}) if tags: tags = [tag.lower().replace("-", " ") for tag in tags] for tag in tags: sq = {"match_phrase": {"hash_tags": tag}} should_query.append(sq) if must_query: sr = sr.query("bool", must=must_query) if should_query: if len(should_query) > 1: sr = sr.filter("bool", should=should_query) else: sr = sr.filter("bool", should=should_query[0]) if domain: sr = sr.filter("terms", domain=list(domain)) if category: cat_objs = Category.objects.filter(name__in=category) category = cat_objs.values_list("id", flat=True) cat_assn_objs = CategoryAssociation.objects.filter( parent_cat__in=cat_objs).values_list( "child_cat__id", flat=True) if cat_assn_objs: new_category = set(list(cat_assn_objs) + list(category)) sr = sr.filter("terms", category_id=list(new_category)) else: if category: sr = sr.filter("terms", category_id=list(category)) if source: source 
= [s.lower() for s in source] sr = sr.filter("terms", source__keyword=source) sr = sr.sort({"article_score": {"order": sort}}) sr = sr.sort({"published_on": {"order": sort}}) start = (page - 1) * size end = start + size sr = sr[start:end] sr.aggs.bucket("category", "terms", field="category.keyword") sr.aggs.bucket("source", "terms", field="source.keyword") sr.aggs.bucket("hash_tags", "terms", field="hash_tags.keyword", size=50) response = sr.execute() results, filters = self.format_response(response) count = response["hits"]["total"] total_pages = math.ceil(count / size) url = request.build_absolute_uri() if end < count: next_page = page + 1 next_url = replace_query_param(url, "page", next_page) else: next_url = None if page != 1: previous_page = page - 1 previous_url = replace_query_param(url, "page", previous_page) else: previous_url = None data = { "results": results, "filters": filters, "count": count, "total_pages": total_pages, "current_page": page, "next": next_url, "previous": previous_url } return Response(create_response(data)) class MenuAPIView(APIView): permission_classes = (AllowAny,) def get(self, request): domain_id = self.request.GET.get("domain") if not domain_id: return Response(create_error_response({"domain": ["Domain id is required"]})) domain = Domain.objects.filter(domain_id=domain_id).first() if not domain: return Response(create_error_response({"domain": ["Domain id is required"]})) menus = MenuSerializer(Menu.objects.filter(domain=domain), many=True) menus_list = menus.data new_menulist = [] for menu in menus_list: menu_dict = {} menu_dict['heading'] = menu new_menulist.append(menu_dict) return Response(create_response({'results': new_menulist})) class DevicesAPIView(APIView): permission_classes = (IsAuthenticated,) def post(self, request, *args, **kwargs): user = self.request.user device_id = self.request.POST.get("device_id", "") device_name = self.request.POST.get("device_name", "") if not user.is_anonymous and device_id and 
device_name: user_device = Devices.objects.filter(user=user.pk) if user_device: user_device.update(device_id=device_id, device_name=device_name, user=user.id) return Response(create_response({"Msg": "Device successfully created"})) elif not user_device: get, created = Devices.objects.get_or_create(device_id=device_id, device_name=device_name, user=user.id) if created: return Response(create_response({"Msg": "Device successfully created"})) else: return Response(create_response({"Msg": "Device already exist"})) elif device_id and device_name: get, created = Devices.objects.get_or_create(device_id=device_id, device_name=device_name) if created: return Response(create_response({"Msg": "Device successfully created"})) else: return Response(create_response({"Msg": "Device already exist"})) else: return Response(create_error_response({"Msg": "device_id and device_name field are required"})) class NotificationAPIView(APIView): permission_classes = (AllowAny,) def post(self, request): device_id = request.data["device_id"] device_name = request.data["device_name"] breaking_news = request.data["breaking_news"] daily_edition = request.data["daily_edition"] personalized = request.data["personalized"] device = Devices.objects.get(device_id=device_id, device_name=device_name) if breaking_news and daily_edition and personalized and device: notification = Notification.objects.filter(device=device) if notification: notification.update(breaking_news=breaking_news, daily_edition=daily_edition, personalized=personalized) return Response(create_response({"Msg": "Notification updated successfully"})) Notification.objects.create(breaking_news=breaking_news, daily_edition=daily_edition, personalized=personalized, device=device) return Response(create_response({"Msg": "Notification created successfully"})) else: return Response( create_error_response( {"Msg": "device_id, device_name, breaking_news, daily_edition and personalized are required"})) def get(self, request): device_id = 
request.GET.get("device_id") device_name = request.GET.get("device_name") device = Devices.objects.filter(device_id=device_id, device_name=device_name).first() if device: notification = NotificationSerializer(Notification.objects.fitler(device=device), many=True) return Response(create_response(notification.data)) return Response(create_error_response({"Msg": "Invalid device_id or device_name"})) class SocialLoginView(generics.GenericAPIView): permission_classes = (AllowAny,) serializer_class = BaseUserProfileSerializer def decode_google_token(self, token_id): request = grequests.Request() try: id_info = id_token.verify_oauth2_token(token_id, request) return id_info except Exception as e: log.debug("error in google token verification {0}".format(e)) return False def get_name_details(self, id_info): first_name = last_name = "" if "name" in id_info: name = id_info.get("name") name_list = name.split(" ") first_name = name_list[0] if len(name_list) > 1: last_name = " ".join(name_list[1:]) if not first_name: if "given_name" in id_info: first_name = id_info.get("given_name") if not last_name: if "family_name" in id_info: last_name = id_info.get("family_name") return first_name, last_name def create_user_profile(self, first_name, last_name, username, email, image_url, sid, provider): user = BaseUserProfile.objects.filter(email=email).first() created = "" if not user: user = BaseUserProfile.objects.create( first_name=first_name, last_name=last_name, email=email, username=username ) sa_obj, created = SocialAccount.objects.get_or_create( social_account_id=sid, image_url=image_url, user=user, provider=provider ) return user, created def get_facebook_data(self, token_id): graph = facebook.GraphAPI(access_token=token_id) try: res_data = graph.get_object( id='me?fields=email,id,first_name,last_name,name,picture.width(150).height(150)') return res_data except Exception as e: log.debug("error in facebook fetch data: {0}".format(e)) return False def get_facebook_name_details(self, 
profile_data): name = first_name = last_name = "" if "first_name" in profile_data: first_name = profile_data.get("first_name") if "last_name" in profile_data: last_name = profile_data.get("last_name") if "name" in profile_data: name = profile_data.get("name") name_list = name.split(" ") if not first_name: first_name = name_list[0] if not last_name: last_name = " ".join(name[1:]) return first_name, last_name def get_user_serialize_data(self, email, device_id, device_name): user = BaseUserProfile.objects.filter(email=email).first() device = Devices.objects.filter(user=user.id) if device: device.update(device_name=device_name, device_id=device_id) else: device, created = Devices.objects.get_or_create(device_name=device_name, device_id=device_id) Devices.objects.filter(pk=device.pk).update(user=user) notification = NotificationSerializer(Notification.objects.get_or_create(device=device), many=True) token, _ = Token.objects.get_or_create(user=user) data = BaseUserProfileSerializer(user).data data["token"] = token.key data["breaking_news"] = notification.data[0]['breaking_news'] data["daily_edition"] = notification.data[0]['daily_edition'] data["personalized"] = notification.data[0]['personalized'] return data def post(self, request, *args, **kwargs): token_id = request.data.get("token_id") provider = request.data.get("provider") device_id = request.data.get("device_id") device_name = request.data.get("device_name") if not token_id: raise TokenIDMissing() if not provider: raise ProviderMissing() if not device_id: return Response(create_error_response({"Msg": "device_id is missing or Invalid device_id"})) if not device_name: return Response(create_error_response({"Msg": "device_name is missing or Invalid device_name"})) if provider not in SOCIAL_AUTH_PROVIDERS: raise ProviderMissing() if provider == "google": id_info = self.decode_google_token(token_id) if not id_info: raise SocialAuthTokenException() first_name, last_name = self.get_name_details(id_info) email = 
id_info.get("email", "") if not email: raise SocialAuthTokenException() username = email.split("@")[0] google_id = id_info.get("sub", "") image_url = id_info.get("picture", "") user, created = self.create_user_profile( first_name, last_name, username, email, image_url, google_id, provider) user_data = self.get_user_serialize_data(email, device_id, device_name) return Response(create_response({"user": user_data})) if provider == "facebook": profile_data = self.get_facebook_data(token_id) if not profile_data: raise SocialAuthTokenException() first_name, last_name = self.get_facebook_name_details( profile_data) email = profile_data.get("email") if not email: raise SocialAuthTokenException() username = username = email.split("@")[0] facebook_id = profile_data.get("id", "") image_url = "" if "picture" in profile_data: if "data" in profile_data["picture"]: image_url = profile_data["picture"]["data"]["url"] user, created = self.create_user_profile( first_name, last_name, username, email, image_url, facebook_id, provider) user_data = self.get_user_serialize_data(email, device_id, device_name) return Response(create_response({"user": user_data})) raise ProviderMissing() class TrendingArticleAPIView(APIView): permission_classes = (AllowAny,) def get(self, request, format=None, *args, **kwargs): domain_id = self.request.GET.get("domain") if not domain_id: return Response(create_error_response({"domain": ["Domain id is required"]})) domain = Domain.objects.filter(domain_id=domain_id).first() if not domain: return Response(create_error_response({"domain": ["Invalid domain name"]})) source = TrendingArticleSerializer(TrendingArticle.objects.filter(domain=domain), many=True) return Response(create_response({"results": source.data})) class SocailMediaPublishing(): def twitter(self, data): try: auth = tweepy.OAuthHandler(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET) auth.set_access_token(settings.TWITTER_ACCESS_TOKEN, settings.TWITTER_ACCESS_TOKEN_SECRET) api = 
tweepy.API(auth) api.update_status(data["title"] + "\n" + data["url"]) except Exception as e: print("Error in twitter post: ", e) class ArticleCreateUpdateView(APIView, SocailMediaPublishing): permission_classes = (IsAuthenticated,) def get_tags(self, tags): tag_list = [] for tag in tags: tag_list.append(tag["name"]) return tag_list def publish(self, obj): serializer = ArticleSerializer(obj) json_data = serializer.data if json_data["hash_tags"]: tag_list = self.get_tags(json_data["hash_tags"]) json_data["hash_tags"] = tag_list ingest_to_elastic([json_data], "article", "article", "id") tweet_data = { "title": serializer.instance.title, "url": serializer.instance.source_url, } self.twitter(tweet_data) def post(self, request): publish = request.data.get("publish") origin = request.META.get("HTTP_ORIGIN") cover_image_id = request.data.get("cover_image_id") if cover_image_id: DraftMedia.objects.filter(id=cover_image_id).delete() if not request.data.get("cover_image"): request.data["cover_image"] = "/".join( [origin, request.user.domain.default_image.url]) context = {"publish": publish, "user": request.user} serializer = ArticleCreateUpdateSerializer( data=request.data, context=context) if serializer.is_valid(): serializer.save() if publish: self.publish(serializer.instance) return Response(create_response(serializer.data)) return Response(create_error_response(serializer.errors), status=400) def put(self, request): _id = request.data.get("id") publish = request.data.get("publish") origin = request.META.get("HTTP_ORIGIN") cover_image_id = request.data.get("cover_image_id") if cover_image_id: DraftMedia.objects.filter(id=cover_image_id).delete() if not request.data.get("cover_image"): request.data["cover_image"] = "/".join( [origin, request.user.domain.default_image.url]) context = {"publish": publish, "user": request.user} article = Article.objects.get(id=_id) serializer = ArticleCreateUpdateSerializer( article, data=request.data, context=context) if 
serializer.is_valid(): serializer.save() if publish: self.publish(serializer.instance) return Response(create_response(serializer.data)) return Response(create_error_response(serializer.errors), status=400) class ChangeArticleStatusView(APIView, SocailMediaPublishing): permission_classes = (IsAuthenticated,) def get_tags(self, tags): tag_list = [] for tag in tags: tag_list.append(tag["name"]) return tag_list def publish(self, obj): serializer = ArticleSerializer(obj) json_data = serializer.data if obj.active: if json_data["hash_tags"]: tag_list = self.get_tags(json_data["hash_tags"]) json_data["hash_tags"] = tag_list ingest_to_elastic([json_data], "article", "article", "id") tweet_data = { "title": serializer.instance.title, "url": serializer.instance.source_url, } self.twitter(tweet_data) else: delete_from_elastic([json_data], "article", "article", "id") def post(self, request): _id = request.data.get("id") article = Article.objects.filter(id=_id).first() if not article: return Response(create_error_response({"error": "Article does not exists"}), status=400) article.active = request.data.get("activate") article.save() self.publish(article) return Response(create_response({ "id": article.id, "active": article.active})) class CategoryBulkUpdate(APIView): permission_classes = (IsAuthenticated,) def get_tags(self, tags): tag_list = [] for tag in tags: tag_list.append(tag["name"]) return tag_list def post(self, request): category_id = request.data['categories'] category = Category.objects.get(id=category_id) for article_id in request.data['articles']: current = Article.objects.get(id=article_id) current.category = category current.save() serializer = ArticleSerializer(current) json_data = serializer.data delete_from_elastic([json_data], "article", "article", "id") if json_data["hash_tags"]: tag_list = self.get_tags(json_data["hash_tags"]) json_data["hash_tags"] = tag_list ingest_to_elastic([json_data], "article", "article", "id") return Response({"ok": "cool"}) class 
GetDailyDigestView(ListAPIView): serializer_class = ArticleSerializer permission_classes = (AllowAny,) def format_response(self, response): results = [] if response.hits.hits: for result in response.hits.hits: results.append(result["_source"]) return results def get_queryset(self): device_id = self.request.GET.get("device_id", "") queryset = Devices.objects.filter(device_id=device_id) dd = DailyDigest.objects.filter(device__in=queryset) if not queryset.exists() or not dd.exists(): return [] return dd.first().articles.all().order_by("-published_on") def list(self, request, *args, **kwargs): queryset = self.get_queryset() if not queryset: sr = Search(using=es, index="article") sort = "desc" sr = sr.sort({"article_score": {"order": sort}}) sr = sr.sort({"published_on": {"order": sort}}) sr = sr[0:20] response = sr.execute() results = self.format_response(response) return Response(create_response({"results": results})) serializer = self.get_serializer(queryset, many=True) if serializer.data: return Response(create_response(serializer.data)) else: return Response(create_error_response({"Msg": "Daily Digest Doesn't Exist"}), status=400) class DraftMediaUploadViewSet(viewsets.ViewSet): permission_classes = (IsAuthenticated,) def create(self, request): image_file = request.data.get("image") if not image_file: return Response(create_error_response({"error": "Image file is required."})) draft_image = DraftMedia.objects.create(image=image_file) serializer = DraftMediaSerializer(draft_image) return Response(create_response(serializer.data)) def update(self, request, pk): image_file = request.data.get("image") if not image_file: return Response(create_error_response({"error": "Image file is required."})) draft_image = DraftMedia.objects.get(id=pk) if not draft_image: return Http404 draft_image.image = image_file draft_image.save() serializer = DraftMediaSerializer(draft_image) return Response(create_response(serializer.data)) def destroy(self, request, pk): draft_image = 
DraftMedia.objects.get(id=pk) if not draft_image: return Http404 draft_image.delete() return Response(create_response({"Msg": "Image deleted successfully"})) class CommentViewSet(viewsets.ViewSet): serializer_class = CommentSerializer permission_classes = (IsAuthenticated,) pagination_class = PostpageNumberPagination ordering = "-created_at" def get_permissions(self): if self.action == 'list': self.permission_classes = [AllowAny] else: self.permission_classes = [IsAuthenticated] return [permission() for permission in self.permission_classes] def create(self, request): captcha_response_key = 0 captcha_key = request.data.get("captcha_key") captcha_value = request.data.get("captcha_value") captcha = CaptchaStore.objects.filter(hashkey=captcha_key).first() if not captcha: return Response(create_error_response({"error": "Invalid Captcha"})) if captcha.response != captcha_value.lower(): return Response(create_error_response({"error": "Invalid Captcha"})) data = request.data.copy() data["user"] = request.user.id serializer = CommentSerializer(data=data) if serializer.is_valid(): serializer.save() return Response(create_response({"result": serializer.data})) return Response(create_error_response({"error": "Enter Valid data"})) def list(self, request): article_id = request.GET.get("article_id", "") if not article_id: return Response( create_error_response( {"error": "Article ID has not been entered by the user"} ) ) article_obj = Article.objects.filter(id=article_id).first() if not article_obj: return Response(create_error_response({"error": "Article does not exist"}) ) comment_list = Comment.objects.filter(article=article_obj, reply=None) serializer = CommentSerializer(comment_list, many=True) return Response( create_response( {"results": serializer.data, "total_article_likes": ArticleLike.objects.filter( article=article_obj).count()})) def destroy(self, request, pk): comment_obj = Comment.objects.filter(id=pk) if not comment_obj: return 
Response(create_error_response({"error": "Comment does not exist"})) comment_obj.delete() return Response(create_response({"Msg": "Comment deleted successfully"})) class LikeAPIView(APIView): permission_classes = (IsAuthenticated,) pagination_class = PostpageNumberPagination ordering = "-created_at" def post(self, request): post_data = request.data.copy() post_data["user"] = request.user.id serializer = ArticleLikeSerializer(data=post_data) if serializer.is_valid(): serializer.save() if serializer.data.get("id"): return Response(create_response({"Msg": "Liked"})) return Response(create_response({"Msg": "Removed Like"})) return Response(create_error_response({"error": "Invalid Data Entered"})) class CaptchaCommentApiView(APIView): permission_classes = (IsAuthenticated,) def get(self, request): captcha_len = len(CaptchaStore.objects.all()) if captcha_len > 500: captcha = CaptchaStore.objects.order_by('?')[:1] to_json_response = dict() to_json_response['status'] = 1 to_json_response['new_captch_key'] = captcha[0].hashkey to_json_response['new_captch_image'] = captcha_image_url(to_json_response['new_captch_key']) return Response(create_response({"result": to_json_response})) else: to_json_response = dict() to_json_response['status'] = 1 to_json_response['new_captch_key'] = CaptchaStore.generate_key() to_json_response['new_captch_image'] = captcha_image_url(to_json_response['new_captch_key']) return Response(create_response({"result": to_json_response})) class AutoCompleteAPIView(generics.GenericAPIView): permission_classes = (AllowAny,) def format_response(self, response): results = [] if response['hits']['hits']: for result in response['hits']['hits']: results.append(result["_source"]) return results def get(self, request): result_list = [] if request.data: query = request.data["q"] else: query = request.GET.get("q", "") if query: results = es.search( index="auto_suggestions", body={ "suggest": { "results": { "text": query, "completion": {"field": "name_suggest"}, } } 
}, ) results = results['suggest']['results'][0]['options'] if results: for result in results: result_list.append( { "value": result["_source"]["name_suggest"], "key": result["_source"]["desc"], } ) return Response(create_response({"result": result_list})) return Response(create_response({"result": []})) class SubsAPIView(ListAPIView): serializer_class = SubsMediaSerializer permission_classes = (AllowAny,) pagination_class = PostpageNumberPagination def get(self, request): q = self.request.GET.get("q", None) subs = Subscription.objects.all() if q: subs = subs.filter(user__email__icontains=q) source = SubsMediaSerializer(subs, many=True) return Response(create_response({"results": source.data})) class UpdateSubsAPIView(APIView): serializer_class = SubsMediaSerializer permission_classes = (AllowAny,) def get(self, request, pk): source = SubsMediaSerializer(Subscription.objects.get(id=pk)) return Response(create_response({"results": source.data})) def post(self, request, *args, **kwargs): subs_id = self.request.POST.get('id') subs = Subscription.objects.filter(id=subs_id) if subs.exists(): subs = subs.first() subs.subs_type = self.request.POST.get('subs_type') auto_renew = self.request.POST.get('auto_renew') if auto_renew == 'No': subs.auto_renew = False else: subs.auto_renew = True subs.save() return Response(create_response({"results": "success"})) return Response(create_response({"results": "error"})) class UserProfileAPIView(APIView): permission_classes = (IsAuthenticated, ) def get(self, request, *args, **kwargs): user = BaseUserProfile.objects.filter(id=self.request.user.id).first() serializer = UserProfileSerializer(user) data = serializer.data response_data = create_response({"user": data}) return Response(response_data) def put(self, request, format=None): if request.user.is_authenticated: if request.data: _id = request.data["id"] else: _id = self.request.POST.get('id') user = BaseUserProfile.objects.get(id=_id) serializer = UserProfileSerializer(user, 
data=request.data) if serializer.is_valid(): serializer.save() return Response(create_response({"result":serializer.data, "Msg":"Profile updated successfully."})) return Response(create_error_response(serializer.errors), status=400) raise Http404 class AccessSession(APIView): permission_classes = (AllowAny,) def get(self, request): print(request.META.items()) request.session["ip"] = request.META.get('REMOTE_ADDR') return Response(create_response({"results": request.session._session_key})) class RSSAPIView(APIView): permission_classes = (AllowAny,) def get(self, request): data = {} domain = request.GET.get("domain") if domain: domain_obj = Domain.objects.filter(domain_id=domain).first() if domain_obj: menus = Menu.objects.filter(domain=domain_obj) for menu in menus: all_categories = menu.submenu.all() for category in all_categories: data[category.name.name] = "/article/rss/?domain=" + domain + "&category=" + category.name.name return Response(create_response({"results": data})) return Response(create_error_response({"error": "Domain do not exist."})) return Response(create_error_response({"error": "Domain is required"}))
true
true
f703ed2aa1c7638b37cd9459328cc99a0a5f16f3
34,077
py
Python
main.py
RedaLegzali/mr-driller
c944b10ff17ece1445b55cf3b44a4aca411dcc0d
[ "MIT" ]
null
null
null
main.py
RedaLegzali/mr-driller
c944b10ff17ece1445b55cf3b44a4aca411dcc0d
[ "MIT" ]
null
null
null
main.py
RedaLegzali/mr-driller
c944b10ff17ece1445b55cf3b44a4aca411dcc0d
[ "MIT" ]
null
null
null
import pygame, time from pygame.locals import * from random import * pygame.init() # Variables Pygame white = (255, 255, 255) crystal = (162,162,162) black = (0, 0, 0) rose = (236,28,115) red = pygame.Color('#ff0000') green = pygame.Color('#00ff62') blue = pygame.Color('#0026ff') yellow = (222,207,4) width = 800 height = 600 clock = pygame.time.Clock() pop_block = pygame.mixer.Sound("Music/pop_block.wav") # Images walkRight = [pygame.image.load('Driller/droite1.png'), pygame.image.load('Driller/droite2.png'), pygame.image.load('Driller/droite3.png'),pygame.image.load('Driller/droite4.png'), pygame.image.load('Driller/droite5.png'), pygame.image.load('Driller/droite6.png'), pygame.image.load('Driller/droite7.png'), pygame.image.load('Driller/droite8.png'), pygame.image.load('Driller/droite9.png')] walkLeft = [pygame.image.load('Driller/gauche1.png'), pygame.image.load('Driller/gauche2.png'), pygame.image.load('Driller/gauche3.png'),pygame.image.load('Driller/gauche4.png'), pygame.image.load('Driller/gauche5.png'),pygame.image.load('Driller/gauche6.png'), pygame.image.load('Driller/gauche7.png'),pygame.image.load('Driller/gauche8.png'), pygame.image.load('Driller/gauche9.png')] fall = [ pygame.image.load('Driller/fall.png'), pygame.image.load('Driller/fall1.png') ] centre = pygame.image.load('Driller/centre.png') blocks = [ pygame.image.load("Blocks/block_jaune.png"), pygame.image.load("Blocks/block_vert.png"), pygame.image.load("Blocks/block_bleu.png"), pygame.image.load("Blocks/block_rouge.png"), pygame.image.load("Blocks/block_blanc.png"), pygame.image.load("Blocks/block_crystal.png"), pygame.image.load("Blocks/block_niveau.png") ] blocks_fissure = [ pygame.image.load("Blocks/block.png"), pygame.image.load("Blocks/block1.png"), pygame.image.load("Blocks/block2.png"), pygame.image.load("Blocks/block3.png"), pygame.image.load("Blocks/block4.png"), pygame.image.load("Blocks/block5.png") ] image_drill_left = pygame.image.load("Driller/drill_left.png") 
image_drill_right = pygame.image.load("Driller/drill_right.png") image_drill_down = pygame.image.load("Driller/drill_down.png") oxy_display = pygame.image.load("Blocks/oxy_display.png") capsule = pygame.image.load("Blocks/capsule_oxygene.png") dead_crash = pygame.image.load("Driller/ecraser.png") dead_air = pygame.image.load("Driller/asph.png") ange = pygame.image.load("Driller/ange.png") depth_display = pygame.image.load("Blocks/depth.png") score_display = pygame.image.load("Blocks/score.png") level_display = pygame.image.load("Blocks/level.png") air_display = pygame.image.load("Blocks/air.png") air_support_display=pygame.image.load("Blocks/air_support.png") air_pourcent_display = pygame.image.load("Blocks/pourcent.png") lives_display = pygame.image.load("Blocks/lives.png") # Variables Globales drill_left = False drill_right = False compteur_drill = 0 temps_recuperer = 0 cologne = 12 ligne = 35 game_over = False surface = pygame.display.set_mode( (width,height) ) pygame.display.set_caption("Mr Driller") obstacles = [[None]*cologne for l in range(ligne) ] x = 100 y = 5 gravity = 5 left = False right = False walkCount = 0 fallCount = 0 pourcentage = 100 points = 0 profondeur = 0 GameOver = False Death = 0 death_depth = [] CountDeath = 3 Capsule_Air = 10 name_list = [] # SP def saisie(): global name_list running = True play = False while running: for event in pygame.event.get(): if event.type == pygame.QUIT: running = False if event.type == KEYDOWN: if event.key == K_RETURN: running = False play = True if event.type == pygame.KEYDOWN and len(name_list) != 30: if event.key == pygame.K_a: name_list.append("a") elif event.key == pygame.K_b: name_list.append("b") elif event.key == pygame.K_c: name_list.append("c") elif event.key == pygame.K_d: name_list.append("d") elif event.key == pygame.K_e: name_list.append("e") elif event.key == pygame.K_f: name_list.append("f") elif event.key == pygame.K_g: name_list.append("g") elif event.key == pygame.K_h: name_list.append("h") 
elif event.key == pygame.K_i: name_list.append("i") elif event.key == pygame.K_j: name_list.append("j") elif event.key == pygame.K_k: name_list.append("k") elif event.key == pygame.K_l: name_list.append("l") elif event.key == pygame.K_m: name_list.append("m") elif event.key == pygame.K_n: name_list.append("n") elif event.key == pygame.K_o: name_list.append("o") elif event.key == pygame.K_p: name_list.append("p") elif event.key == pygame.K_q: name_list.append("q") elif event.key == pygame.K_r: name_list.append("r") elif event.key == pygame.K_s: name_list.append("s") elif event.key == pygame.K_t: name_list.append("t") elif event.key == pygame.K_u: name_list.append("u") elif event.key == pygame.K_v: name_list.append("v") elif event.key == pygame.K_w: name_list.append("w") elif event.key == pygame.K_x: name_list.append("x") elif event.key == pygame.K_y: name_list.append("y") elif event.key == pygame.K_z: name_list.append("z") elif event.key == pygame.K_SPACE: name_list.append(" ") if event.type == pygame.KEYDOWN: if event.key == pygame.K_BACKSPACE and len(name_list) > 0: name_list.pop(-1) #surface.fill( (0,0,0) ) ecran_saisie = pygame.image.load("Screens/EnterNameBetter.png") ecran_saisie = pygame.transform.scale(ecran_saisie, (width, height)) surface.blit(ecran_saisie,(0,0)) string = ''.join(name_list) font = pygame.font.Font("Screens/monospace.ttf" , 40) texte = font.render(string , True , (0,0,0)) rectangle = texte.get_rect() rectangle.topleft = (150,130) surface.blit(texte,rectangle) pygame.display.update() clock.tick(60) return play , string def air(): global pourcentage , GameOver , Death , x , death_depth pos_x = 620 pos_y = 300 font = pygame.font.Font("freesansbold.ttf", 30) if pourcentage <= 0: GameOver = True Death = 1 if pourcentage > 100: pourcentage = 100 text_temps = font.render(str(pourcentage), True, white) list_rotato = [oxy_display for loop in range(pourcentage)] surface.blit(text_temps, (pos_x+80, pos_y+40)) 
surface.blit(air_display,(pos_x-20,pos_y-50)) surface.blit(air_support_display,(pos_x-8,pos_y-3)) surface.blit(air_pourcent_display,(pos_x+135,pos_y+40 )) longueur_barre = 0 for k in list_rotato: surface.blit(k, (pos_x + longueur_barre, pos_y)) longueur_barre += 1.5 def score(points): pos_x = 620 pos_y = 150 font = pygame.font.Font("freesansbold.ttf", 30) pygame.draw.circle(surface,rose,(pos_x,pos_y+20),10,0) pygame.draw.circle(surface,rose,(pos_x+30,pos_y+20),10,0) text_score = font.render(str(points), True, white) text = font.render("PTS", True, rose) surface.blit(text_score, (pos_x+80, pos_y+30)) surface.blit(text, (pos_x+100, pos_y+60)) surface.blit(score_display,(pos_x-20,pos_y-30)) def depth(profondeur): pos_x = 620 pos_y = 50 font = pygame.font.Font("freesansbold.ttf", 30) pygame.draw.circle(surface, yellow, (pos_x, pos_y), 10, 0) pygame.draw.circle(surface, yellow, (pos_x + 30, pos_y), 10, 0) text_score = font.render(str(profondeur), True, white) text = font.render("FT", True, yellow) surface.blit(text_score, (pos_x + 80, pos_y)) surface.blit(text, (pos_x + 100, pos_y + 30)) surface.blit(depth_display,(600,0)) def lives(DeathCount): pos_x = 560 pos_y = 400 font = pygame.font.Font("freesansbold.ttf", 30) text_score = font.render(str(DeathCount), True, white) text = font.render("x", True, red) surface.blit(text_score, (pos_x + 180, pos_y+32)) surface.blit(text, (pos_x + 150, pos_y+30)) surface.blit(ange,(pos_x + 80, pos_y+5)) surface.blit(lives_display,(600,pos_y-25)) def levels(): pos_x=600 pos_y= 480 font = pygame.font.Font("freesansbold.ttf", 30) text_level = font.render(str(level), True, white) surface.blit(text_level, (pos_x+50 , pos_y+50)) surface.blit(level_display,(pos_x,pos_y)) def chrono(seconds): time.sleep(1) return (seconds + 1) def intro(): pygame.mixer.music.load("Intro/intro_music.mp3") # je rapporte la musique pygame.display.flip() font = pygame.font.Font(None, 24) clock = pygame.time.Clock() seconds = 0 nextimg = 1 ''' Chargement des images 
et choix de la premiere image''' images = [ pygame.image.load("Intro/Start_screen1.png"), pygame.image.load("Intro/Start_screen2.png"), pygame.image.load("Intro/Start_screen3.png"), pygame.image.load("Intro/Start_screen4.png"), pygame.image.load("Intro/Start_screen5.png"), pygame.image.load("Intro/Start_screen6.png"), pygame.image.load("Intro/Start_screen7.png"), pygame.image.load("Intro/Start_screen8.png") ] pygame.mixer.music.play(0) # On lance la musique running = True play = False while running: seconds = chrono(seconds) # on lance le chrono if seconds > 0 and seconds % 3 == 0: # tout les trois secondes on change d'images nextimg += 1 if nextimg <= len(images): choix_image = images[nextimg-1] choix_image = pygame.transform.scale(choix_image, (width, height)) text_temps = font.render(str(seconds) + " seconds since start", 1,(255, 255, 255)) # petite indicateur de temps surface.blit(choix_image, (0, 0)) surface.blit(text_temps, (0, 0)) for event in pygame.event.get(): if event.type == pygame.QUIT: running = False if event.type == KEYDOWN: if event.key == K_SPACE: running = False play = True pygame.display.update() clock.tick(60) return play def initialise(): global obstacles x_cube = 0 hauteur = y+200 caps = Capsule_Air for i in range(0,ligne-5): for j in range(cologne): if caps != 0: square_type = randint(1,8) else: square_type = randint(1,7) if square_type == 8: caps -= 1 square = pygame.Rect(x_cube, hauteur, 50, 50) if square_type == 5: obstacles[i][j] = [square, square_type,0,0] elif square_type == 7: obstacles[i][j] = [square, square_type,0] else: obstacles[i][j] = [square, square_type] cpt = 3 while cpt >= 3: cpt = 0 for k in range(j - 1, j - 4, -1): if k >= 0: if obstacles[i][k] != None: if (obstacles[i][k])[1] == (obstacles[i][j])[1]: cpt += 1 for l in range(i - 1, i - 4, -1): if l >= 0: if obstacles[l][j] != None: if (obstacles[l][j])[1] == (obstacles[i][j])[1]: cpt += 1 if cpt >= 3: square_type = randint(1, 7) if square_type == 5: obstacles[i][j] = 
[square, square_type, 0, 0] elif square_type == 7: obstacles[i][j] = [square, square_type, 0] else: obstacles[i][j] = [square, square_type] x_cube += 50 x_cube = 0 hauteur += 50 hauteur += 400 for i in range(ligne-5 , ligne): for j in range(cologne): square = pygame.Rect(x_cube, hauteur, 50, 50) obstacles[i][j] = [square , 9] x_cube += 50 x_cube = 0 hauteur += 50 def draw(): global collision_vertical , x , y , obstacles surface.fill(black) pygame.draw.line(surface, white, (600, 0), (600, height)) pygame.draw.line(surface, rose, (600, 125), (width, 125)) pygame.draw.line(surface, rose, (600, 250), (width, 250)) pygame.draw.line(surface, rose, (600, 375), (width, 375)) pygame.draw.line(surface, rose, (600, 500), (width, 500)) for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: if (obstacles[i][j])[1] == 1: #pygame.draw.rect(surface, red, (obstacles[i][j])[0]) surface.blit( blocks[3] , (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 2: #pygame.draw.rect(surface, blue, (obstacles[i][j])[0]) surface.blit( blocks[2] , (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 3: #pygame.draw.rect(surface, yellow, (obstacles[i][j])[0]) surface.blit( blocks[0] , (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 4: #pygame.draw.rect(surface, green, (obstacles[i][j])[0]) surface.blit( blocks[1] , (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 5: surface.blit(blocks_fissure[ (obstacles[i][j])[2] ], (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 6: #pygame.draw.rect(surface, white, (obstacles[i][j])[0]) surface.blit(blocks[4], (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 7: #pygame.draw.rect(surface, crystal, (obstacles[i][j])[0]) surface.blit(blocks[5], (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 8: surface.blit(capsule, (obstacles[i][j])[0]) else: surface.blit(blocks[6], (obstacles[i][j])[0]) def move(): global walkCount , fallCount ,x, y , liste_blocks , compteur_drill , GameOver , Death , second_death , obstacles \ , death_depth if 
walkCount + 1 >= 27: walkCount = 0 if fallCount+1 == 6: fallCount = 0 if Death == 2: if second_death >= 100: image_ange = ange image_ange = pygame.transform.scale(image_ange, (55, 55)) surface.blit(image_ange, (x - 10, y - 10)) else: image_death = dead_crash image_death = pygame.transform.scale(image_death, (55, 55)) surface.blit(image_death, (x - 10, y - 10)) elif Death == 1: if second_death >= 100: image_ange = ange image_ange = pygame.transform.scale(image_ange, (55, 55)) surface.blit(image_ange, (x - 10, y - 10)) else: image_air = dead_air image_air = pygame.transform.scale(image_air, (55, 55)) surface.blit(image_air, (x - 10, y - 10)) elif not collision_horizontal: image_fall = pygame.transform.scale(fall[fallCount // 3], (55, 55)) surface.blit(image_fall, (x - 10, y - 10)) fallCount += 1 y += gravity elif compteur_drill != 0: if drill_right and not drill_left: image_d_right = image_drill_right image_d_right = pygame.transform.scale(image_d_right, (55, 55)) surface.blit(image_d_right, (x - 10, y - 10)) elif not drill_right and drill_left: image_d_left = image_drill_left image_d_left = pygame.transform.scale(image_d_left, (55, 55)) surface.blit(image_d_left, (x - 10, y - 10)) else: image_d_down = image_drill_down image_d_down = pygame.transform.scale(image_d_down, (55, 55)) surface.blit(image_d_down, (x - 10, y - 10)) compteur_drill -= 1 else: if left == True: image_left = walkLeft[walkCount//3] image_left = pygame.transform.scale(image_left, (55, 55)) surface.blit(image_left , (x-10,y-10)) walkCount += 1 elif right == True: image_right = walkRight[walkCount // 3] image_right = pygame.transform.scale(image_right, (55, 55)) surface.blit(image_right , (x-10,y-10)) walkCount += 1 else: image_centre = pygame.transform.scale(centre, (55, 55)) surface.blit(image_centre, (x - 10, y - 10)) for element in liste_blocks: square = element[0] compteur = element[1] seconds_gravity = element[2] if compteur == 50: i,j = element[5] , element[3] destruction_block(i,j) 
liste_blocks.remove(element) else: if compteur == 0: if seconds_gravity == 100: square.x = element[3]*50 square.y += gravity element[1] += gravity else: if seconds_gravity % 5 == 0: if element[4] == -2: element[4] = 2 else: element[4] = -2 square.x += element[4] else: square.y += gravity element[1] += gravity i,j = element[5] , element[3] if obstacles[i][j] != None: if (obstacles[i][j])[1] != 8: if (square.bottom-5 > driller.top and ( square.left-5 < driller.left < square.right-5 or square.left+5 < driller.right < square.right+5) ): GameOver = True Death = 2 death_depth = [i,j] def events(): global left , right , x , y , walkCount , collision_vertical_right , collision_vertical_left , drill_right , drill_left keys = pygame.key.get_pressed() if compteur_drill == 0: if not GameOver: if keys[pygame.K_LEFT] and x > 5: if not collision_vertical_left: x -= 5 left = True right = False drill_right = False drill_left = False elif keys[pygame.K_RIGHT] and x < 560: if not collision_vertical_right: x += 5 drill_right = False drill_left = False right = True left = False else: right = False left = False drill_left = False drill_right = False walkCount = 0 if jump == True: if not GameOver: if keys[pygame.K_SPACE]: y -= 55 def collisions_player(): global collision_vertical_right , collision_vertical_left , collision_horizontal , x,y , jump , obstacles ,\ drill_ticker , drill_right , drill_left , compteur_drill , pourcentage , points , profondeur , death_depth keys = pygame.key.get_pressed() liste = [] for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: square = (obstacles[i][j])[0] if driller.colliderect(square): if y == square.y - 45: collision_horizontal = True liste.append((i,j)) profondeur = ( ligne*(level-1) ) + i else: jmp = False if x == square.x + 45: if (obstacles[i][j])[1] != 8: collision_vertical_left = True if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_a] and drill_ticker 
== 0: collisions_blocks(i, j) drill_left = True drill_right = False compteur_drill = 20 drill_ticker = 20 if i != 0: if (obstacles[i - 1][j]) != None: if (obstacles[i - 1][j ])[1] != 8: if (obstacles[i - 1][j])[0].bottom + 5 == driller.top: jmp = True if (obstacles[i - 1][j+1]) != None: if (obstacles[i - 1][j+1])[1] != 8: if (obstacles[i - 1][j+1])[0].bottom + 5 == driller.top: jmp = True if jmp == False: jump = True else: jump = True else: obstacles[i][j] = None points += 1 pourcentage += 20 if x == square.x - 35: if (obstacles[i][j])[1] != 8: collision_vertical_right = True if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_e] and drill_ticker == 0: drill_ticker = 20 collisions_blocks(i, j) drill_right = True drill_left = False compteur_drill = 20 if i != 0: if (obstacles[i-1][j]) != None: if (obstacles[i - 1][j])[1] != 8: if (obstacles[i-1][j])[0].bottom+5 == driller.top: jmp = True if (obstacles[i-1][j-1]) != None: if (obstacles[i - 1][j - 1])[1] != 8: if (obstacles[i-1][j-1])[0].bottom+5 == driller.top: jmp = True if jmp == False: jump = True else: jump = True else: obstacles[i][j] = None points += 1 pourcentage += 20 for element in liste: i = element[0] j = element[1] if obstacles[i][j] != None: square = (obstacles[i][j])[0] if len(liste) == 2: if square.x+15 == x: if Death == 1: death_depth = [i,j] x -= 5 if (obstacles[i][j])[1] != 8: if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_z]: # Right if obstacles[i][j] != None: collisions_blocks(i, j) x -= 5 drill_ticker = 20 drill_right = True drill_left = False compteur_drill = 20 else: obstacles[i][j] = None points += 1 pourcentage += 20 elif square.x-5 == x: if Death == 1: death_depth = [i, j] x += 5 if (obstacles[i][j])[1] != 8: if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_z]: # Left if 
obstacles[i][j] != None: collisions_blocks(i, j) x += 5 drill_right = False drill_right = True drill_ticker = 20 compteur_drill = 20 else: obstacles[i][j] = None points += 1 pourcentage += 20 else: if Death == 1: death_depth = [i, j] if (obstacles[i][j])[1] != 8: if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_z]: # Down if obstacles[i][j] != None and drill_ticker == 0: drill_ticker = 20 collisions_blocks(i, j) drill_right = False drill_right = False compteur_drill = 20 else: obstacles[i][j] = None points += 1 pourcentage += 20 def gravity_blocks(): global obstacles , gravity , liste_blocks liste = [] for i in range(1 , ligne): for j in range(0,cologne): if obstacles[i][j] == None and obstacles[i-1][j] != None: liste.append( (i-1 , i , j) ) for element in liste: i = element[1] i_1 = element[0] j = element[2] j_sup = j+1 if (obstacles[i_1][j])[1] != 6: continue_sup = False while j_sup < cologne and i_1+1 < ligne: if obstacles[i_1][j_sup] != None: if (obstacles[i_1][j])[1] == (obstacles[i_1][j_sup])[1]: if obstacles[i_1+1][j_sup] != None: continue_sup = True break else: break else: break j_sup += 1 if continue_sup: continue j_inf = j-1 continue_inf = False while j_inf < cologne and i_1 + 1 < ligne: if obstacles[i_1][j_inf] != None: if (obstacles[i_1][j])[1] == (obstacles[i_1][j_inf])[1]: if obstacles[i_1 + 1][j_inf] != None: continue_inf = True break else: break else: break j_inf -= 1 if continue_inf: continue obstacles[i][j] = obstacles[i_1][j] obstacles[i_1][j] = None liste_blocks.append( [ (obstacles[i][j])[0] , 0 , 0 , j , 2, i ] ) def collisions_blocks(i,j): global obstacles , points , NextLevel if (obstacles[i][j])[1] == 9: NextLevel = True elif (obstacles[i][j])[1] != 8: liste = [ (i,j) ] compteur = 1 while compteur != 0: compteur = 0 for element in liste: position_i = element[0] position_j = element[1] i_sup = position_i + 1 i_inf = position_i - 1 j_sup = position_j + 1 j_inf = position_j 
- 1 if i_sup < ligne and obstacles[i_sup][position_j] != None: if (i_sup , position_j) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[i_sup][position_j])[1]: liste.append((i_sup, position_j)) compteur += 1 if i_inf >= 0 and obstacles[i_inf][position_j] != None: if (i_inf , position_j) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[i_inf][position_j])[1]: liste.append((i_inf, position_j)) compteur += 1 if j_sup < cologne and obstacles[position_i][j_sup] != None: if (position_i,j_sup) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_sup])[1]: liste.append((position_i, j_sup)) compteur += 1 if j_inf >= 0 and obstacles[position_i][j_inf] != None: if (position_i,j_inf) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_inf])[1]: liste.append((position_i, j_inf)) compteur += 1 pop_block.play() for element in liste: i = element[0] j = element[1] if len(obstacles[i][j]) == 4: if (obstacles[i][j])[2] < 5: (obstacles[i][j])[2] += 1 else: obstacles[i][j] = None points += 1 def destruction_block(i,j): global obstacles , merge_blocks , pourcentage , points liste = [(i, j)] compteur = 1 cpt_global = 1 while compteur != 0: compteur = 0 for element in liste: position_i = element[0] position_j = element[1] i_sup = position_i + 1 i_inf = position_i - 1 j_sup = position_j + 1 j_inf = position_j - 1 if obstacles[position_i][position_j] != None: if i_sup < ligne and obstacles[i_sup][position_j] != None: if (i_sup, position_j) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[i_sup][position_j])[1]: liste.append((i_sup, position_j)) compteur += 1 cpt_global += 1 if i_inf >= 0 and obstacles[i_inf][position_j] != None: if (i_inf, position_j) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[i_inf][position_j])[1]: liste.append((i_inf, position_j)) compteur += 1 cpt_global += 1 if j_sup < cologne and obstacles[position_i][j_sup] != None: 
if (position_i, j_sup) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_sup])[1]: liste.append((position_i, j_sup)) compteur += 1 cpt_global += 1 if j_inf >= 0 and obstacles[position_i][j_inf] != None: if (position_i, j_inf) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_inf])[1]: liste.append((position_i, j_inf)) compteur += 1 cpt_global += 1 if cpt_global >= 4: pop_block.play() for element in liste: i1 = element[0] j1 = element[1] points += 1 if len(obstacles[i1][j1]) == 4: surface.blit(blocks_fissure[5], (obstacles[i1][j1])[0]) obstacles[i1][j1] = None else: obstacles[i1][j1] = None def save(): fopen = open("Save/sauvegarde.txt","a") fopen.close() fichier = open("Save/sauvegarde.txt","r") ecraser = False list_name = [] lines = fichier.readlines() if len(lines) != 0: for user in lines: for i in range(len(user)): if user[i] == ':': list_name.append([user[0:i-1] , int(user[i+1:])]) for element in list_name: if username == element[0]: ecraser = True if points > element[1]: list_name.remove(element) list_name.append([username,points]) fic = open("Save/sauvegarde.txt","w") for element in list_name: fic.write(element[0]+' : '+str(element[1])+'\n') fic.close() fichier.close() if not ecraser: fichier = open('Save/sauvegarde.txt' , 'a') fic = open('Save/sauvegarde.txt' , 'r') ligne = fic.readline() if len(ligne) != 0: fichier.write("\n"+username+" : "+str(points)) else: fichier.write(username+" : "+str(points)) fic.close() fichier.close() def load(): global game_over fichier = open("Save/sauvegarde.txt" , "r") liste_trie = [] lignes = fichier.readlines() for ligne in lignes: for i in range(len(ligne)): if ligne[i] == ':': liste_trie.append( [ligne[0:i],int(ligne[i+1:])] ) cpt = 1 while cpt != 0: cpt = 0 for i in range(len(liste_trie)-1): element_1 = liste_trie[i] element_2 = liste_trie[i+1] if element_1[1] < element_2[1]: temp = element_1 liste_trie[i] = element_2 liste_trie[i+1] = temp cpt += 1 
fichier.close() for event in pygame.event.get(): if event.type == QUIT: game_over = True pos_x , pos_y = 250,150 surface.fill(black) img = pygame.image.load("Screens/HighScore.png") surface.blit(img , (250,-50)) font = pygame.font.Font("Screens/monospace.ttf",30) texte , texte1 = font.render("Name" , True , green) , font.render("Score" , True , green) rectangle , rectangle1 = texte.get_rect() , texte1.get_rect() rectangle.topleft = (pos_x , pos_y) rectangle1.topleft = (pos_x+250,pos_y) surface.blit(texte , rectangle) surface.blit(texte1,rectangle1) pos_y +=50 for element in liste_trie: texte = font.render(element[0] ,True,white) texte1 = font.render(str(element[1]),True,white) rectangle = texte.get_rect() rectangle1 = texte1.get_rect() rectangle.topleft = (pos_x,pos_y) rectangle1.topleft = (pos_x+250,pos_y) surface.blit(texte , rectangle) surface.blit(texte1,rectangle1) pos_y += 50 def pause(): global Paused , game_over if Paused == 1: paused = True font = pygame.font.Font("freesansbold.ttf",40) texte = font.render("Paused",True,white) rectangle = texte.get_rect() rectangle.topleft = (200,50) while paused: pygame.draw.rect(surface,black,rectangle,0) surface.blit(texte,rectangle) for event in pygame.event.get(): if event.type == QUIT: paused = False game_over = True if event.type == KEYDOWN: if event.key == K_p: paused = False Paused += 1 if Paused == 2: Paused = 0 pygame.display.update() def lose(second): global y , GameOver , pourcentage , Death , CountDeath if second == 80: if len(death_depth) != 0: i,j = death_depth[0],death_depth[1] for k in range(i, -1, -1): obstacles[k][j] = None if second >= 100: y -= gravity if second == 200: GameOver = False pourcentage = 100 Death = 0 CountDeath -= 1 def gameover(death_screen): global game_over for event in pygame.event.get(): if event.type == QUIT: game_over = True if death_screen == 0: pygame.mixer.music.stop() pygame.mixer.music.load("Music/GameOver.wav") pygame.mixer.music.play(0) img = 
pygame.image.load("Screens/GameOver.jpg") img = pygame.transform.scale(img, (width, height)) surface.blit(img , (0,0)) def win(win_screen): global game_over for event in pygame.event.get(): if event.type == QUIT: game_over = True if win_screen == 0: pygame.mixer.music.stop() pygame.mixer.music.load("Music/WinTheme.wav") pygame.mixer.music.play(0) surface.fill(black) img = pygame.image.load("Screens/Win.jpg") img = pygame.transform.scale(img, (width, height-200)) surface.blit(img , (0,200)) message = "Congratulations !!" message1 = "Game Completed" font = pygame.font.Font("freesansbold.ttf",50) texte = font.render(message,True,white) texte1 = font.render(message1,True,white) rectangle = texte.get_rect() rectangle.topleft = (200,50) rectangle1 = texte1.get_rect() rectangle1.topleft = (200,120) surface.blit(texte , rectangle) surface.blit(texte1 , rectangle1) def main(): global game_over , collision_horizontal , collision_vertical_left , collision_vertical_right , jump , driller,\ x, y , obstacles , drill_ticker , liste_blocks , merge_blocks , pourcentage , points , second_death , CountDeath , Capsule_Air , NextLevel , level , Paused drill_ticker , second = 0 , 0 liste_blocks = [] merge_blocks = [] second_death = 0 driller , level = 1 , 1 pygame.mixer.music.load("Music/main.mp3") pygame.mixer.music.play(-1) NextLevel = False cpt_save , Paused= 0 , 0 death_screen , win_screen = 0,0 while not game_over: clock.tick(60) if CountDeath != 0 and level != 11: collision_horizontal = False collision_vertical_left = False collision_vertical_right = False jump = False if NextLevel: initialise() level += 1 pourcentage = 100 points += 10 Capsule_Air -= 1 NextLevel = False driller = pygame.Rect(x, y, 40, 50) for event in pygame.event.get(): if event.type == QUIT: game_over = True if event.type == KEYDOWN: if event.key == K_p: Paused += 1 if drill_ticker > 0: drill_ticker -= 1 if not GameOver: second_death = 0 second += 1 if second == 60: second = 0 pourcentage -= 1 if y >= 300: y -= 
50 for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: (obstacles[i][j])[0].y -= 50 if y <= 100: y += 50 for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: (obstacles[i][j])[0].y += 50 else: second_death += 1 lose(second_death) for element in liste_blocks: element[2] += 1 for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: if len(obstacles[i][j]) == 4: if (obstacles[i][j])[2] == 5: (obstacles[i][j])[3] += 1 if (obstacles[i][j])[3] == 20: obstacles[i][j] = None pourcentage -= 20 points += 1 if obstacles[i][j] != None: if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] >= 1: (obstacles[i][j])[2] += 1 if (obstacles[i][j])[2] == 500: obstacles[i][j] = None pop_block.play() draw() air() collisions_player() gravity_blocks() move() events() score(points) depth(profondeur) lives(CountDeath) levels() pause() else: if cpt_save == 0: save() cpt_save += 1 if CountDeath == 0 and death_screen < 100: gameover(death_screen) death_screen += 1 elif level == 11 and win_screen < 100: win(win_screen) win_screen += 1 else: load() pygame.display.update() pygame.quit() # Lancemant : launch , username = saisie() if launch == True: play = intro() if play == True: pygame.mixer.music.stop() initialise() main() else: pygame.quit() else: pygame.quit()
28.186104
158
0.601726
import pygame, time from pygame.locals import * from random import * pygame.init() white = (255, 255, 255) crystal = (162,162,162) black = (0, 0, 0) rose = (236,28,115) red = pygame.Color('#ff0000') green = pygame.Color('#00ff62') blue = pygame.Color('#0026ff') yellow = (222,207,4) width = 800 height = 600 clock = pygame.time.Clock() pop_block = pygame.mixer.Sound("Music/pop_block.wav") walkRight = [pygame.image.load('Driller/droite1.png'), pygame.image.load('Driller/droite2.png'), pygame.image.load('Driller/droite3.png'),pygame.image.load('Driller/droite4.png'), pygame.image.load('Driller/droite5.png'), pygame.image.load('Driller/droite6.png'), pygame.image.load('Driller/droite7.png'), pygame.image.load('Driller/droite8.png'), pygame.image.load('Driller/droite9.png')] walkLeft = [pygame.image.load('Driller/gauche1.png'), pygame.image.load('Driller/gauche2.png'), pygame.image.load('Driller/gauche3.png'),pygame.image.load('Driller/gauche4.png'), pygame.image.load('Driller/gauche5.png'),pygame.image.load('Driller/gauche6.png'), pygame.image.load('Driller/gauche7.png'),pygame.image.load('Driller/gauche8.png'), pygame.image.load('Driller/gauche9.png')] fall = [ pygame.image.load('Driller/fall.png'), pygame.image.load('Driller/fall1.png') ] centre = pygame.image.load('Driller/centre.png') blocks = [ pygame.image.load("Blocks/block_jaune.png"), pygame.image.load("Blocks/block_vert.png"), pygame.image.load("Blocks/block_bleu.png"), pygame.image.load("Blocks/block_rouge.png"), pygame.image.load("Blocks/block_blanc.png"), pygame.image.load("Blocks/block_crystal.png"), pygame.image.load("Blocks/block_niveau.png") ] blocks_fissure = [ pygame.image.load("Blocks/block.png"), pygame.image.load("Blocks/block1.png"), pygame.image.load("Blocks/block2.png"), pygame.image.load("Blocks/block3.png"), pygame.image.load("Blocks/block4.png"), pygame.image.load("Blocks/block5.png") ] image_drill_left = pygame.image.load("Driller/drill_left.png") image_drill_right = 
pygame.image.load("Driller/drill_right.png") image_drill_down = pygame.image.load("Driller/drill_down.png") oxy_display = pygame.image.load("Blocks/oxy_display.png") capsule = pygame.image.load("Blocks/capsule_oxygene.png") dead_crash = pygame.image.load("Driller/ecraser.png") dead_air = pygame.image.load("Driller/asph.png") ange = pygame.image.load("Driller/ange.png") depth_display = pygame.image.load("Blocks/depth.png") score_display = pygame.image.load("Blocks/score.png") level_display = pygame.image.load("Blocks/level.png") air_display = pygame.image.load("Blocks/air.png") air_support_display=pygame.image.load("Blocks/air_support.png") air_pourcent_display = pygame.image.load("Blocks/pourcent.png") lives_display = pygame.image.load("Blocks/lives.png") drill_left = False drill_right = False compteur_drill = 0 temps_recuperer = 0 cologne = 12 ligne = 35 game_over = False surface = pygame.display.set_mode( (width,height) ) pygame.display.set_caption("Mr Driller") obstacles = [[None]*cologne for l in range(ligne) ] x = 100 y = 5 gravity = 5 left = False right = False walkCount = 0 fallCount = 0 pourcentage = 100 points = 0 profondeur = 0 GameOver = False Death = 0 death_depth = [] CountDeath = 3 Capsule_Air = 10 name_list = [] def saisie(): global name_list running = True play = False while running: for event in pygame.event.get(): if event.type == pygame.QUIT: running = False if event.type == KEYDOWN: if event.key == K_RETURN: running = False play = True if event.type == pygame.KEYDOWN and len(name_list) != 30: if event.key == pygame.K_a: name_list.append("a") elif event.key == pygame.K_b: name_list.append("b") elif event.key == pygame.K_c: name_list.append("c") elif event.key == pygame.K_d: name_list.append("d") elif event.key == pygame.K_e: name_list.append("e") elif event.key == pygame.K_f: name_list.append("f") elif event.key == pygame.K_g: name_list.append("g") elif event.key == pygame.K_h: name_list.append("h") elif event.key == pygame.K_i: 
name_list.append("i") elif event.key == pygame.K_j: name_list.append("j") elif event.key == pygame.K_k: name_list.append("k") elif event.key == pygame.K_l: name_list.append("l") elif event.key == pygame.K_m: name_list.append("m") elif event.key == pygame.K_n: name_list.append("n") elif event.key == pygame.K_o: name_list.append("o") elif event.key == pygame.K_p: name_list.append("p") elif event.key == pygame.K_q: name_list.append("q") elif event.key == pygame.K_r: name_list.append("r") elif event.key == pygame.K_s: name_list.append("s") elif event.key == pygame.K_t: name_list.append("t") elif event.key == pygame.K_u: name_list.append("u") elif event.key == pygame.K_v: name_list.append("v") elif event.key == pygame.K_w: name_list.append("w") elif event.key == pygame.K_x: name_list.append("x") elif event.key == pygame.K_y: name_list.append("y") elif event.key == pygame.K_z: name_list.append("z") elif event.key == pygame.K_SPACE: name_list.append(" ") if event.type == pygame.KEYDOWN: if event.key == pygame.K_BACKSPACE and len(name_list) > 0: name_list.pop(-1) ecran_saisie = pygame.image.load("Screens/EnterNameBetter.png") ecran_saisie = pygame.transform.scale(ecran_saisie, (width, height)) surface.blit(ecran_saisie,(0,0)) string = ''.join(name_list) font = pygame.font.Font("Screens/monospace.ttf" , 40) texte = font.render(string , True , (0,0,0)) rectangle = texte.get_rect() rectangle.topleft = (150,130) surface.blit(texte,rectangle) pygame.display.update() clock.tick(60) return play , string def air(): global pourcentage , GameOver , Death , x , death_depth pos_x = 620 pos_y = 300 font = pygame.font.Font("freesansbold.ttf", 30) if pourcentage <= 0: GameOver = True Death = 1 if pourcentage > 100: pourcentage = 100 text_temps = font.render(str(pourcentage), True, white) list_rotato = [oxy_display for loop in range(pourcentage)] surface.blit(text_temps, (pos_x+80, pos_y+40)) surface.blit(air_display,(pos_x-20,pos_y-50)) surface.blit(air_support_display,(pos_x-8,pos_y-3)) 
surface.blit(air_pourcent_display,(pos_x+135,pos_y+40 )) longueur_barre = 0 for k in list_rotato: surface.blit(k, (pos_x + longueur_barre, pos_y)) longueur_barre += 1.5 def score(points): pos_x = 620 pos_y = 150 font = pygame.font.Font("freesansbold.ttf", 30) pygame.draw.circle(surface,rose,(pos_x,pos_y+20),10,0) pygame.draw.circle(surface,rose,(pos_x+30,pos_y+20),10,0) text_score = font.render(str(points), True, white) text = font.render("PTS", True, rose) surface.blit(text_score, (pos_x+80, pos_y+30)) surface.blit(text, (pos_x+100, pos_y+60)) surface.blit(score_display,(pos_x-20,pos_y-30)) def depth(profondeur): pos_x = 620 pos_y = 50 font = pygame.font.Font("freesansbold.ttf", 30) pygame.draw.circle(surface, yellow, (pos_x, pos_y), 10, 0) pygame.draw.circle(surface, yellow, (pos_x + 30, pos_y), 10, 0) text_score = font.render(str(profondeur), True, white) text = font.render("FT", True, yellow) surface.blit(text_score, (pos_x + 80, pos_y)) surface.blit(text, (pos_x + 100, pos_y + 30)) surface.blit(depth_display,(600,0)) def lives(DeathCount): pos_x = 560 pos_y = 400 font = pygame.font.Font("freesansbold.ttf", 30) text_score = font.render(str(DeathCount), True, white) text = font.render("x", True, red) surface.blit(text_score, (pos_x + 180, pos_y+32)) surface.blit(text, (pos_x + 150, pos_y+30)) surface.blit(ange,(pos_x + 80, pos_y+5)) surface.blit(lives_display,(600,pos_y-25)) def levels(): pos_x=600 pos_y= 480 font = pygame.font.Font("freesansbold.ttf", 30) text_level = font.render(str(level), True, white) surface.blit(text_level, (pos_x+50 , pos_y+50)) surface.blit(level_display,(pos_x,pos_y)) def chrono(seconds): time.sleep(1) return (seconds + 1) def intro(): pygame.mixer.music.load("Intro/intro_music.mp3") pygame.display.flip() font = pygame.font.Font(None, 24) clock = pygame.time.Clock() seconds = 0 nextimg = 1 images = [ pygame.image.load("Intro/Start_screen1.png"), pygame.image.load("Intro/Start_screen2.png"), pygame.image.load("Intro/Start_screen3.png"), 
pygame.image.load("Intro/Start_screen4.png"), pygame.image.load("Intro/Start_screen5.png"), pygame.image.load("Intro/Start_screen6.png"), pygame.image.load("Intro/Start_screen7.png"), pygame.image.load("Intro/Start_screen8.png") ] pygame.mixer.music.play(0) running = True play = False while running: seconds = chrono(seconds) if seconds > 0 and seconds % 3 == 0: nextimg += 1 if nextimg <= len(images): choix_image = images[nextimg-1] choix_image = pygame.transform.scale(choix_image, (width, height)) text_temps = font.render(str(seconds) + " seconds since start", 1,(255, 255, 255)) # petite indicateur de temps surface.blit(choix_image, (0, 0)) surface.blit(text_temps, (0, 0)) for event in pygame.event.get(): if event.type == pygame.QUIT: running = False if event.type == KEYDOWN: if event.key == K_SPACE: running = False play = True pygame.display.update() clock.tick(60) return play def initialise(): global obstacles x_cube = 0 hauteur = y+200 caps = Capsule_Air for i in range(0,ligne-5): for j in range(cologne): if caps != 0: square_type = randint(1,8) else: square_type = randint(1,7) if square_type == 8: caps -= 1 square = pygame.Rect(x_cube, hauteur, 50, 50) if square_type == 5: obstacles[i][j] = [square, square_type,0,0] elif square_type == 7: obstacles[i][j] = [square, square_type,0] else: obstacles[i][j] = [square, square_type] cpt = 3 while cpt >= 3: cpt = 0 for k in range(j - 1, j - 4, -1): if k >= 0: if obstacles[i][k] != None: if (obstacles[i][k])[1] == (obstacles[i][j])[1]: cpt += 1 for l in range(i - 1, i - 4, -1): if l >= 0: if obstacles[l][j] != None: if (obstacles[l][j])[1] == (obstacles[i][j])[1]: cpt += 1 if cpt >= 3: square_type = randint(1, 7) if square_type == 5: obstacles[i][j] = [square, square_type, 0, 0] elif square_type == 7: obstacles[i][j] = [square, square_type, 0] else: obstacles[i][j] = [square, square_type] x_cube += 50 x_cube = 0 hauteur += 50 hauteur += 400 for i in range(ligne-5 , ligne): for j in range(cologne): square = 
pygame.Rect(x_cube, hauteur, 50, 50) obstacles[i][j] = [square , 9] x_cube += 50 x_cube = 0 hauteur += 50 def draw(): global collision_vertical , x , y , obstacles surface.fill(black) pygame.draw.line(surface, white, (600, 0), (600, height)) pygame.draw.line(surface, rose, (600, 125), (width, 125)) pygame.draw.line(surface, rose, (600, 250), (width, 250)) pygame.draw.line(surface, rose, (600, 375), (width, 375)) pygame.draw.line(surface, rose, (600, 500), (width, 500)) for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: if (obstacles[i][j])[1] == 1: #pygame.draw.rect(surface, red, (obstacles[i][j])[0]) surface.blit( blocks[3] , (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 2: #pygame.draw.rect(surface, blue, (obstacles[i][j])[0]) surface.blit( blocks[2] , (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 3: #pygame.draw.rect(surface, yellow, (obstacles[i][j])[0]) surface.blit( blocks[0] , (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 4: #pygame.draw.rect(surface, green, (obstacles[i][j])[0]) surface.blit( blocks[1] , (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 5: surface.blit(blocks_fissure[ (obstacles[i][j])[2] ], (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 6: #pygame.draw.rect(surface, white, (obstacles[i][j])[0]) surface.blit(blocks[4], (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 7: #pygame.draw.rect(surface, crystal, (obstacles[i][j])[0]) surface.blit(blocks[5], (obstacles[i][j])[0]) elif (obstacles[i][j])[1] == 8: surface.blit(capsule, (obstacles[i][j])[0]) else: surface.blit(blocks[6], (obstacles[i][j])[0]) def move(): global walkCount , fallCount ,x, y , liste_blocks , compteur_drill , GameOver , Death , second_death , obstacles \ , death_depth if walkCount + 1 >= 27: walkCount = 0 if fallCount+1 == 6: fallCount = 0 if Death == 2: if second_death >= 100: image_ange = ange image_ange = pygame.transform.scale(image_ange, (55, 55)) surface.blit(image_ange, (x - 10, y - 10)) else: image_death = dead_crash 
image_death = pygame.transform.scale(image_death, (55, 55)) surface.blit(image_death, (x - 10, y - 10)) elif Death == 1: if second_death >= 100: image_ange = ange image_ange = pygame.transform.scale(image_ange, (55, 55)) surface.blit(image_ange, (x - 10, y - 10)) else: image_air = dead_air image_air = pygame.transform.scale(image_air, (55, 55)) surface.blit(image_air, (x - 10, y - 10)) elif not collision_horizontal: image_fall = pygame.transform.scale(fall[fallCount // 3], (55, 55)) surface.blit(image_fall, (x - 10, y - 10)) fallCount += 1 y += gravity elif compteur_drill != 0: if drill_right and not drill_left: image_d_right = image_drill_right image_d_right = pygame.transform.scale(image_d_right, (55, 55)) surface.blit(image_d_right, (x - 10, y - 10)) elif not drill_right and drill_left: image_d_left = image_drill_left image_d_left = pygame.transform.scale(image_d_left, (55, 55)) surface.blit(image_d_left, (x - 10, y - 10)) else: image_d_down = image_drill_down image_d_down = pygame.transform.scale(image_d_down, (55, 55)) surface.blit(image_d_down, (x - 10, y - 10)) compteur_drill -= 1 else: if left == True: image_left = walkLeft[walkCount//3] image_left = pygame.transform.scale(image_left, (55, 55)) surface.blit(image_left , (x-10,y-10)) walkCount += 1 elif right == True: image_right = walkRight[walkCount // 3] image_right = pygame.transform.scale(image_right, (55, 55)) surface.blit(image_right , (x-10,y-10)) walkCount += 1 else: image_centre = pygame.transform.scale(centre, (55, 55)) surface.blit(image_centre, (x - 10, y - 10)) for element in liste_blocks: square = element[0] compteur = element[1] seconds_gravity = element[2] if compteur == 50: i,j = element[5] , element[3] destruction_block(i,j) liste_blocks.remove(element) else: if compteur == 0: if seconds_gravity == 100: square.x = element[3]*50 square.y += gravity element[1] += gravity else: if seconds_gravity % 5 == 0: if element[4] == -2: element[4] = 2 else: element[4] = -2 square.x += element[4] else: 
square.y += gravity element[1] += gravity i,j = element[5] , element[3] if obstacles[i][j] != None: if (obstacles[i][j])[1] != 8: if (square.bottom-5 > driller.top and ( square.left-5 < driller.left < square.right-5 or square.left+5 < driller.right < square.right+5) ): GameOver = True Death = 2 death_depth = [i,j] def events(): global left , right , x , y , walkCount , collision_vertical_right , collision_vertical_left , drill_right , drill_left keys = pygame.key.get_pressed() if compteur_drill == 0: if not GameOver: if keys[pygame.K_LEFT] and x > 5: if not collision_vertical_left: x -= 5 left = True right = False drill_right = False drill_left = False elif keys[pygame.K_RIGHT] and x < 560: if not collision_vertical_right: x += 5 drill_right = False drill_left = False right = True left = False else: right = False left = False drill_left = False drill_right = False walkCount = 0 if jump == True: if not GameOver: if keys[pygame.K_SPACE]: y -= 55 def collisions_player(): global collision_vertical_right , collision_vertical_left , collision_horizontal , x,y , jump , obstacles ,\ drill_ticker , drill_right , drill_left , compteur_drill , pourcentage , points , profondeur , death_depth keys = pygame.key.get_pressed() liste = [] for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: square = (obstacles[i][j])[0] if driller.colliderect(square): if y == square.y - 45: collision_horizontal = True liste.append((i,j)) profondeur = ( ligne*(level-1) ) + i else: jmp = False if x == square.x + 45: if (obstacles[i][j])[1] != 8: collision_vertical_left = True if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_a] and drill_ticker == 0: collisions_blocks(i, j) drill_left = True drill_right = False compteur_drill = 20 drill_ticker = 20 if i != 0: if (obstacles[i - 1][j]) != None: if (obstacles[i - 1][j ])[1] != 8: if (obstacles[i - 1][j])[0].bottom + 5 == driller.top: jmp = True if (obstacles[i - 
1][j+1]) != None: if (obstacles[i - 1][j+1])[1] != 8: if (obstacles[i - 1][j+1])[0].bottom + 5 == driller.top: jmp = True if jmp == False: jump = True else: jump = True else: obstacles[i][j] = None points += 1 pourcentage += 20 if x == square.x - 35: if (obstacles[i][j])[1] != 8: collision_vertical_right = True if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_e] and drill_ticker == 0: drill_ticker = 20 collisions_blocks(i, j) drill_right = True drill_left = False compteur_drill = 20 if i != 0: if (obstacles[i-1][j]) != None: if (obstacles[i - 1][j])[1] != 8: if (obstacles[i-1][j])[0].bottom+5 == driller.top: jmp = True if (obstacles[i-1][j-1]) != None: if (obstacles[i - 1][j - 1])[1] != 8: if (obstacles[i-1][j-1])[0].bottom+5 == driller.top: jmp = True if jmp == False: jump = True else: jump = True else: obstacles[i][j] = None points += 1 pourcentage += 20 for element in liste: i = element[0] j = element[1] if obstacles[i][j] != None: square = (obstacles[i][j])[0] if len(liste) == 2: if square.x+15 == x: if Death == 1: death_depth = [i,j] x -= 5 if (obstacles[i][j])[1] != 8: if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_z]: # Right if obstacles[i][j] != None: collisions_blocks(i, j) x -= 5 drill_ticker = 20 drill_right = True drill_left = False compteur_drill = 20 else: obstacles[i][j] = None points += 1 pourcentage += 20 elif square.x-5 == x: if Death == 1: death_depth = [i, j] x += 5 if (obstacles[i][j])[1] != 8: if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_z]: # Left if obstacles[i][j] != None: collisions_blocks(i, j) x += 5 drill_right = False drill_right = True drill_ticker = 20 compteur_drill = 20 else: obstacles[i][j] = None points += 1 pourcentage += 20 else: if Death == 1: death_depth = [i, j] if (obstacles[i][j])[1] != 8: if 
(obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] == 0: (obstacles[i][j])[2] += 1 if not GameOver: if keys[pygame.K_z]: # Down if obstacles[i][j] != None and drill_ticker == 0: drill_ticker = 20 collisions_blocks(i, j) drill_right = False drill_right = False compteur_drill = 20 else: obstacles[i][j] = None points += 1 pourcentage += 20 def gravity_blocks(): global obstacles , gravity , liste_blocks liste = [] for i in range(1 , ligne): for j in range(0,cologne): if obstacles[i][j] == None and obstacles[i-1][j] != None: liste.append( (i-1 , i , j) ) for element in liste: i = element[1] i_1 = element[0] j = element[2] j_sup = j+1 if (obstacles[i_1][j])[1] != 6: continue_sup = False while j_sup < cologne and i_1+1 < ligne: if obstacles[i_1][j_sup] != None: if (obstacles[i_1][j])[1] == (obstacles[i_1][j_sup])[1]: if obstacles[i_1+1][j_sup] != None: continue_sup = True break else: break else: break j_sup += 1 if continue_sup: continue j_inf = j-1 continue_inf = False while j_inf < cologne and i_1 + 1 < ligne: if obstacles[i_1][j_inf] != None: if (obstacles[i_1][j])[1] == (obstacles[i_1][j_inf])[1]: if obstacles[i_1 + 1][j_inf] != None: continue_inf = True break else: break else: break j_inf -= 1 if continue_inf: continue obstacles[i][j] = obstacles[i_1][j] obstacles[i_1][j] = None liste_blocks.append( [ (obstacles[i][j])[0] , 0 , 0 , j , 2, i ] ) def collisions_blocks(i,j): global obstacles , points , NextLevel if (obstacles[i][j])[1] == 9: NextLevel = True elif (obstacles[i][j])[1] != 8: liste = [ (i,j) ] compteur = 1 while compteur != 0: compteur = 0 for element in liste: position_i = element[0] position_j = element[1] i_sup = position_i + 1 i_inf = position_i - 1 j_sup = position_j + 1 j_inf = position_j - 1 if i_sup < ligne and obstacles[i_sup][position_j] != None: if (i_sup , position_j) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[i_sup][position_j])[1]: liste.append((i_sup, position_j)) compteur += 1 if i_inf >= 0 and 
obstacles[i_inf][position_j] != None: if (i_inf , position_j) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[i_inf][position_j])[1]: liste.append((i_inf, position_j)) compteur += 1 if j_sup < cologne and obstacles[position_i][j_sup] != None: if (position_i,j_sup) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_sup])[1]: liste.append((position_i, j_sup)) compteur += 1 if j_inf >= 0 and obstacles[position_i][j_inf] != None: if (position_i,j_inf) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_inf])[1]: liste.append((position_i, j_inf)) compteur += 1 pop_block.play() for element in liste: i = element[0] j = element[1] if len(obstacles[i][j]) == 4: if (obstacles[i][j])[2] < 5: (obstacles[i][j])[2] += 1 else: obstacles[i][j] = None points += 1 def destruction_block(i,j): global obstacles , merge_blocks , pourcentage , points liste = [(i, j)] compteur = 1 cpt_global = 1 while compteur != 0: compteur = 0 for element in liste: position_i = element[0] position_j = element[1] i_sup = position_i + 1 i_inf = position_i - 1 j_sup = position_j + 1 j_inf = position_j - 1 if obstacles[position_i][position_j] != None: if i_sup < ligne and obstacles[i_sup][position_j] != None: if (i_sup, position_j) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[i_sup][position_j])[1]: liste.append((i_sup, position_j)) compteur += 1 cpt_global += 1 if i_inf >= 0 and obstacles[i_inf][position_j] != None: if (i_inf, position_j) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[i_inf][position_j])[1]: liste.append((i_inf, position_j)) compteur += 1 cpt_global += 1 if j_sup < cologne and obstacles[position_i][j_sup] != None: if (position_i, j_sup) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_sup])[1]: liste.append((position_i, j_sup)) compteur += 1 cpt_global += 1 if j_inf >= 0 and obstacles[position_i][j_inf] != None: if 
(position_i, j_inf) not in liste: if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_inf])[1]: liste.append((position_i, j_inf)) compteur += 1 cpt_global += 1 if cpt_global >= 4: pop_block.play() for element in liste: i1 = element[0] j1 = element[1] points += 1 if len(obstacles[i1][j1]) == 4: surface.blit(blocks_fissure[5], (obstacles[i1][j1])[0]) obstacles[i1][j1] = None else: obstacles[i1][j1] = None def save(): fopen = open("Save/sauvegarde.txt","a") fopen.close() fichier = open("Save/sauvegarde.txt","r") ecraser = False list_name = [] lines = fichier.readlines() if len(lines) != 0: for user in lines: for i in range(len(user)): if user[i] == ':': list_name.append([user[0:i-1] , int(user[i+1:])]) for element in list_name: if username == element[0]: ecraser = True if points > element[1]: list_name.remove(element) list_name.append([username,points]) fic = open("Save/sauvegarde.txt","w") for element in list_name: fic.write(element[0]+' : '+str(element[1])+'\n') fic.close() fichier.close() if not ecraser: fichier = open('Save/sauvegarde.txt' , 'a') fic = open('Save/sauvegarde.txt' , 'r') ligne = fic.readline() if len(ligne) != 0: fichier.write("\n"+username+" : "+str(points)) else: fichier.write(username+" : "+str(points)) fic.close() fichier.close() def load(): global game_over fichier = open("Save/sauvegarde.txt" , "r") liste_trie = [] lignes = fichier.readlines() for ligne in lignes: for i in range(len(ligne)): if ligne[i] == ':': liste_trie.append( [ligne[0:i],int(ligne[i+1:])] ) cpt = 1 while cpt != 0: cpt = 0 for i in range(len(liste_trie)-1): element_1 = liste_trie[i] element_2 = liste_trie[i+1] if element_1[1] < element_2[1]: temp = element_1 liste_trie[i] = element_2 liste_trie[i+1] = temp cpt += 1 fichier.close() for event in pygame.event.get(): if event.type == QUIT: game_over = True pos_x , pos_y = 250,150 surface.fill(black) img = pygame.image.load("Screens/HighScore.png") surface.blit(img , (250,-50)) font = 
pygame.font.Font("Screens/monospace.ttf",30) texte , texte1 = font.render("Name" , True , green) , font.render("Score" , True , green) rectangle , rectangle1 = texte.get_rect() , texte1.get_rect() rectangle.topleft = (pos_x , pos_y) rectangle1.topleft = (pos_x+250,pos_y) surface.blit(texte , rectangle) surface.blit(texte1,rectangle1) pos_y +=50 for element in liste_trie: texte = font.render(element[0] ,True,white) texte1 = font.render(str(element[1]),True,white) rectangle = texte.get_rect() rectangle1 = texte1.get_rect() rectangle.topleft = (pos_x,pos_y) rectangle1.topleft = (pos_x+250,pos_y) surface.blit(texte , rectangle) surface.blit(texte1,rectangle1) pos_y += 50 def pause(): global Paused , game_over if Paused == 1: paused = True font = pygame.font.Font("freesansbold.ttf",40) texte = font.render("Paused",True,white) rectangle = texte.get_rect() rectangle.topleft = (200,50) while paused: pygame.draw.rect(surface,black,rectangle,0) surface.blit(texte,rectangle) for event in pygame.event.get(): if event.type == QUIT: paused = False game_over = True if event.type == KEYDOWN: if event.key == K_p: paused = False Paused += 1 if Paused == 2: Paused = 0 pygame.display.update() def lose(second): global y , GameOver , pourcentage , Death , CountDeath if second == 80: if len(death_depth) != 0: i,j = death_depth[0],death_depth[1] for k in range(i, -1, -1): obstacles[k][j] = None if second >= 100: y -= gravity if second == 200: GameOver = False pourcentage = 100 Death = 0 CountDeath -= 1 def gameover(death_screen): global game_over for event in pygame.event.get(): if event.type == QUIT: game_over = True if death_screen == 0: pygame.mixer.music.stop() pygame.mixer.music.load("Music/GameOver.wav") pygame.mixer.music.play(0) img = pygame.image.load("Screens/GameOver.jpg") img = pygame.transform.scale(img, (width, height)) surface.blit(img , (0,0)) def win(win_screen): global game_over for event in pygame.event.get(): if event.type == QUIT: game_over = True if win_screen == 0: 
pygame.mixer.music.stop() pygame.mixer.music.load("Music/WinTheme.wav") pygame.mixer.music.play(0) surface.fill(black) img = pygame.image.load("Screens/Win.jpg") img = pygame.transform.scale(img, (width, height-200)) surface.blit(img , (0,200)) message = "Congratulations !!" message1 = "Game Completed" font = pygame.font.Font("freesansbold.ttf",50) texte = font.render(message,True,white) texte1 = font.render(message1,True,white) rectangle = texte.get_rect() rectangle.topleft = (200,50) rectangle1 = texte1.get_rect() rectangle1.topleft = (200,120) surface.blit(texte , rectangle) surface.blit(texte1 , rectangle1) def main(): global game_over , collision_horizontal , collision_vertical_left , collision_vertical_right , jump , driller,\ x, y , obstacles , drill_ticker , liste_blocks , merge_blocks , pourcentage , points , second_death , CountDeath , Capsule_Air , NextLevel , level , Paused drill_ticker , second = 0 , 0 liste_blocks = [] merge_blocks = [] second_death = 0 driller , level = 1 , 1 pygame.mixer.music.load("Music/main.mp3") pygame.mixer.music.play(-1) NextLevel = False cpt_save , Paused= 0 , 0 death_screen , win_screen = 0,0 while not game_over: clock.tick(60) if CountDeath != 0 and level != 11: collision_horizontal = False collision_vertical_left = False collision_vertical_right = False jump = False if NextLevel: initialise() level += 1 pourcentage = 100 points += 10 Capsule_Air -= 1 NextLevel = False driller = pygame.Rect(x, y, 40, 50) for event in pygame.event.get(): if event.type == QUIT: game_over = True if event.type == KEYDOWN: if event.key == K_p: Paused += 1 if drill_ticker > 0: drill_ticker -= 1 if not GameOver: second_death = 0 second += 1 if second == 60: second = 0 pourcentage -= 1 if y >= 300: y -= 50 for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: (obstacles[i][j])[0].y -= 50 if y <= 100: y += 50 for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: (obstacles[i][j])[0].y += 50 else: 
second_death += 1 lose(second_death) for element in liste_blocks: element[2] += 1 for i in range(ligne): for j in range(cologne): if obstacles[i][j] != None: if len(obstacles[i][j]) == 4: if (obstacles[i][j])[2] == 5: (obstacles[i][j])[3] += 1 if (obstacles[i][j])[3] == 20: obstacles[i][j] = None pourcentage -= 20 points += 1 if obstacles[i][j] != None: if (obstacles[i][j])[1] == 7: if (obstacles[i][j])[2] >= 1: (obstacles[i][j])[2] += 1 if (obstacles[i][j])[2] == 500: obstacles[i][j] = None pop_block.play() draw() air() collisions_player() gravity_blocks() move() events() score(points) depth(profondeur) lives(CountDeath) levels() pause() else: if cpt_save == 0: save() cpt_save += 1 if CountDeath == 0 and death_screen < 100: gameover(death_screen) death_screen += 1 elif level == 11 and win_screen < 100: win(win_screen) win_screen += 1 else: load() pygame.display.update() pygame.quit() # Lancemant : launch , username = saisie() if launch == True: play = intro() if play == True: pygame.mixer.music.stop() initialise() main() else: pygame.quit() else: pygame.quit()
true
true
f703ed40d35384b8567eb44be81ef99bdda43a53
5,508
py
Python
src/cogs/normal/owner-normal.py
ChrisKalahiki/ruger-bot
40043094890e88956e3252a83b5c15ac108a5187
[ "MIT" ]
null
null
null
src/cogs/normal/owner-normal.py
ChrisKalahiki/ruger-bot
40043094890e88956e3252a83b5c15ac108a5187
[ "MIT" ]
null
null
null
src/cogs/normal/owner-normal.py
ChrisKalahiki/ruger-bot
40043094890e88956e3252a83b5c15ac108a5187
[ "MIT" ]
null
null
null
import json import os import sys import disnake from disnake.ext import commands from disnake.ext.commands import Context from helpers import json_manager, checks import logging if not os.path.isfile("../config.json"): sys.exit("'config.json' not found by general-normal! Please add it and try again.") else: with open("../config.json") as file: config = json.load(file) ''' Logging ''' logger = logging.getLogger('discord') logger.setLevel(logging.INFO) handler = logging.FileHandler(filename='../logs/discord.log', encoding='utf-8',mode='w') handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')) logger.addHandler(handler) class Owner(commands.Cog, name="owner-normal"): def __init__(self, bot): self.bot = bot @commands.command( name="shutdown", description="Make the bot shutdown.", ) @checks.is_owner() async def shutdown(self, context: Context): """ Makes the bot shutdown. """ embed = disnake.Embed( description="Shutting down. Bye! :wave:", color=0x9C84EF ) logger.info(f"Shutting down. Bye! :wave:") await context.send(embed=embed) await self.bot.close() @commands.command( name="say", description="The bot will say anything you want.", ) @checks.is_owner() async def say(self, context: Context, *, message: str): """ The bot will say anything you want. """ logger.info(f"Saying '{message}'") await context.send(message) @commands.command( name="embed", description="The bot will say anything you want, but within embeds.", ) @checks.is_owner() async def embed(self, context: Context, *, message: str): """ The bot will say anything you want, but within embeds. """ embed = disnake.Embed( description=message, color=0x9C84EF ) logger.info(f"Saying '{message}'") await context.send(embed=embed) @commands.group( name="blacklist" ) async def blacklist(self, context: Context): """ Lets you add or remove a user from not being able to use the bot. 
""" if context.invoked_subcommand is None: with open("../data/blacklist.json") as file: blacklist = json.load(file) embed = disnake.Embed( title=f"There are currently {len(blacklist['ids'])} blacklisted IDs", description=f"{', '.join(str(id) for id in blacklist['ids'])}", color=0x9C84EF ) await context.send(embed=embed) @blacklist.command( name="add" ) async def blacklist_add(self, context: Context, member: disnake.Member = None): """ Lets you add a user from not being able to use the bot. """ try: user_id = member.id with open("../data/blacklist.json") as file: blacklist = json.load(file) if user_id in blacklist['ids']: embed = disnake.Embed( title="Error!", description=f"**{member.name}** is already in the blacklist.", color=0xE02B2B ) return await context.send(embed=embed) json_manager.add_user_to_blacklist(user_id) embed = disnake.Embed( title="User Blacklisted", description=f"**{member.name}** has been successfully added to the blacklist", color=0x9C84EF ) with open("../data/blacklist.json") as file: blacklist = json.load(file) embed.set_footer( text=f"There are now {len(blacklist['ids'])} users in the blacklist" ) logger.info(f"{member.name} has been added to the blacklist.") await context.send(embed=embed) except: embed = disnake.Embed( title="Error!", description=f"An unknown error occurred when trying to add **{member.name}** to the blacklist.", color=0xE02B2B ) await context.send(embed=embed) @blacklist.command( name="remove" ) async def blacklist_remove(self, context, member: disnake.Member = None): """ Lets you remove a user from not being able to use the bot. 
""" try: user_id = member.id json_manager.remove_user_from_blacklist(user_id) embed = disnake.Embed( title="User removed from blacklist", description=f"**{member.name}** has been successfully removed from the blacklist", color=0x9C84EF ) with open("../data/blacklist.json") as file: blacklist = json.load(file) embed.set_footer( text=f"There are now {len(blacklist['ids'])} users in the blacklist" ) logger.info(f"{member.name} has been removed from the blacklist.") await context.send(embed=embed) except: embed = disnake.Embed( title="Error!", description=f"**{member.name}** is not in the blacklist.", color=0xE02B2B ) await context.send(embed=embed) def setup(bot): bot.add_cog(Owner(bot))
34
112
0.562818
import json import os import sys import disnake from disnake.ext import commands from disnake.ext.commands import Context from helpers import json_manager, checks import logging if not os.path.isfile("../config.json"): sys.exit("'config.json' not found by general-normal! Please add it and try again.") else: with open("../config.json") as file: config = json.load(file) logger = logging.getLogger('discord') logger.setLevel(logging.INFO) handler = logging.FileHandler(filename='../logs/discord.log', encoding='utf-8',mode='w') handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')) logger.addHandler(handler) class Owner(commands.Cog, name="owner-normal"): def __init__(self, bot): self.bot = bot @commands.command( name="shutdown", description="Make the bot shutdown.", ) @checks.is_owner() async def shutdown(self, context: Context): embed = disnake.Embed( description="Shutting down. Bye! :wave:", color=0x9C84EF ) logger.info(f"Shutting down. Bye! :wave:") await context.send(embed=embed) await self.bot.close() @commands.command( name="say", description="The bot will say anything you want.", ) @checks.is_owner() async def say(self, context: Context, *, message: str): logger.info(f"Saying '{message}'") await context.send(message) @commands.command( name="embed", description="The bot will say anything you want, but within embeds.", ) @checks.is_owner() async def embed(self, context: Context, *, message: str): embed = disnake.Embed( description=message, color=0x9C84EF ) logger.info(f"Saying '{message}'") await context.send(embed=embed) @commands.group( name="blacklist" ) async def blacklist(self, context: Context): if context.invoked_subcommand is None: with open("../data/blacklist.json") as file: blacklist = json.load(file) embed = disnake.Embed( title=f"There are currently {len(blacklist['ids'])} blacklisted IDs", description=f"{', '.join(str(id) for id in blacklist['ids'])}", color=0x9C84EF ) await context.send(embed=embed) 
@blacklist.command( name="add" ) async def blacklist_add(self, context: Context, member: disnake.Member = None): try: user_id = member.id with open("../data/blacklist.json") as file: blacklist = json.load(file) if user_id in blacklist['ids']: embed = disnake.Embed( title="Error!", description=f"**{member.name}** is already in the blacklist.", color=0xE02B2B ) return await context.send(embed=embed) json_manager.add_user_to_blacklist(user_id) embed = disnake.Embed( title="User Blacklisted", description=f"**{member.name}** has been successfully added to the blacklist", color=0x9C84EF ) with open("../data/blacklist.json") as file: blacklist = json.load(file) embed.set_footer( text=f"There are now {len(blacklist['ids'])} users in the blacklist" ) logger.info(f"{member.name} has been added to the blacklist.") await context.send(embed=embed) except: embed = disnake.Embed( title="Error!", description=f"An unknown error occurred when trying to add **{member.name}** to the blacklist.", color=0xE02B2B ) await context.send(embed=embed) @blacklist.command( name="remove" ) async def blacklist_remove(self, context, member: disnake.Member = None): try: user_id = member.id json_manager.remove_user_from_blacklist(user_id) embed = disnake.Embed( title="User removed from blacklist", description=f"**{member.name}** has been successfully removed from the blacklist", color=0x9C84EF ) with open("../data/blacklist.json") as file: blacklist = json.load(file) embed.set_footer( text=f"There are now {len(blacklist['ids'])} users in the blacklist" ) logger.info(f"{member.name} has been removed from the blacklist.") await context.send(embed=embed) except: embed = disnake.Embed( title="Error!", description=f"**{member.name}** is not in the blacklist.", color=0xE02B2B ) await context.send(embed=embed) def setup(bot): bot.add_cog(Owner(bot))
true
true
f703edede08ad35c0ea1f34204d1139fd51ad639
89
py
Python
markers/apps.py
pabulumm/neighbors
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
[ "BSD-3-Clause" ]
null
null
null
markers/apps.py
pabulumm/neighbors
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
[ "BSD-3-Clause" ]
null
null
null
markers/apps.py
pabulumm/neighbors
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
[ "BSD-3-Clause" ]
null
null
null
from django.apps import AppConfig class MarkersConfig(AppConfig): name = 'markers'
14.833333
33
0.752809
from django.apps import AppConfig class MarkersConfig(AppConfig): name = 'markers'
true
true
f703edfd294b009350efb017aa9f635fff7cb725
30,396
py
Python
treetopper/stand.py
zacharybeebe/treetopper
9302d9c482eb2209c516c79100be98614666f8c1
[ "MIT" ]
null
null
null
treetopper/stand.py
zacharybeebe/treetopper
9302d9c482eb2209c516c79100be98614666f8c1
[ "MIT" ]
null
null
null
treetopper/stand.py
zacharybeebe/treetopper
9302d9c482eb2209c516c79100be98614666f8c1
[ "MIT" ]
null
null
null
from os import ( startfile, getcwd ) from os.path import join from io import BytesIO from csv import ( writer, excel ) from openpyxl import ( Workbook, load_workbook ) from statistics import ( mean, variance, stdev ) from treetopper.plot import Plot from treetopper.timber import ( TimberQuick, TimberFull ) from treetopper.log import Log from treetopper.thin import ( ThinTPA, ThinBA, ThinRD ) from treetopper._exceptions import TargetDensityError from treetopper.fvs import FVS from treetopper._constants import ( math, ALL_SPECIES_NAMES, GRADE_SORT, LOG_LENGTHS, SORTED_HEADS ) from treetopper._utils import ( format_comma, format_pct, extension_check, reorder_dict, check_date, add_logs_to_table_heads ) from treetopper._import_from_sheets import import_from_sheet from treetopper._print_console import ( print_stand_species, print_stand_logs, print_stand_stats ) from treetopper._print_pdf import PDF class Stand(object): """The Stand Class represents a stand of timber that has had an inventory conducted on it. It should made up of plots (Plot Class) which contain trees (Timber Classes). The Stand class will run calculations and statistics of the current stand conditions and it will run calculations of the log merchantabilty for three metrics: logs per acre, log board feet per acre, and log cubic feet per acre, based on log grades, log length ranges and species. 
""" def __init__(self, name: str, plot_factor: float, acres: float = None, inventory_date: str = None): self.name = name.upper() self.plot_factor = plot_factor self.plots = [] self.plot_count = 0 self.tpa = 0 self.ba_ac = 0 self.qmd = 0 self.rd_ac = 0 self.bf_ac = 0 self.cf_ac = 0 self.avg_hgt = 0 self.hdr = 0 self.vbar = 0 self.tpa_stats = {} self.ba_ac_stats = {} self.rd_ac_stats = {} self.bf_ac_stats = {} self.cf_ac_stats = {} self.species = {} self.species_gross = {} self.species_stats = {} self.logs = {} self.table_data = [] self.summary_stand = [] self.summary_logs = {} self.summary_stats = [] self.metrics = ['tpa', 'ba_ac', 'rd_ac', 'bf_ac', 'cf_ac'] self.attrs = ['_gross', '_stats', ''] self.acres = acres if inventory_date: self.inv_date = check_date(inventory_date) else: self.inv_date = inventory_date def __getitem__(self, attribute: str): return self.__dict__[attribute] def get_stand_table_text(self): """Returns a console-formatted string of current stand conditions""" return print_stand_species(self.summary_stand) def get_logs_table_text(self): """Returns a console-formatted string of stand logs data""" return print_stand_logs(self.summary_logs) def get_stats_table_text(self): """Returns and console-formatted string of stand stand statistics""" return print_stand_stats(self.summary_stats) def get_console_report_text(self): """Returns a console-formatted string of the complete stand report""" return self._compile_report_text() def console_report(self): """Prints a console-formatted string of the complete stand report""" print(self._compile_report_text()) def get_pdf_report_bytes_io(self): pdf = self._compile_pdf_report() return BytesIO(pdf.output(dest='S').encode('latin-1')) def pdf_report(self, filename: str, directory: str = None, start_file_upon_creation: bool = False): """Exports a pdf of the complete stand report to a user specified directory or if directory is None, to the current working directory. 
Will open the created pdf report if start_file_upon_creation is True""" check = extension_check(filename, '.pdf') if directory: file = join(directory, check) else: file = join(getcwd(), check) pdf = self._compile_pdf_report() pdf.output(file, 'F') if start_file_upon_creation: startfile(file) def add_plot(self, plot: Plot): """Adds a plot to the stand's plots list and re-runs the calculations and statistics of the stand. plot argument needs to be the a Plot Class""" self.plots.append(plot) self.plot_count += 1 for met in self.metrics: self._update_metrics(met) self.qmd = math.sqrt((self.ba_ac / self.tpa) / .005454) self.vbar = self.bf_ac / self.ba_ac self._update_species(plot) self._update_logs(plot) self.table_data = self._update_table_data() self.summary_stand = self._update_summary_stand() self.summary_logs = self._update_summary_logs() self.summary_stats = self._update_summary_stats() def import_sheet_quick(self, file_path: str): """Imports tree and plot data from a CSV or XLSX file for a quick cruise and adds that data to the stand""" plots = import_from_sheet(file_path, self.name, 'q') for plot_num in plots: plot = Plot() for tree in plots[plot_num]: plot.add_tree(TimberQuick(self.plot_factor, *tree)) self.add_plot(plot) def import_sheet_full(self, file_path: str): """Imports tree and plot data from a CSV or XLSX file for a full cruise and adds that data to the stand""" plots = import_from_sheet(file_path, self.name, 'f') for plot_num in plots: plot = Plot() for tree_data in plots[plot_num]: args = tree_data[: -1] logs = tree_data[-1] tree = TimberFull(self.plot_factor, *args) for log in logs: tree.add_log(*log) plot.add_tree(tree) self.add_plot(plot) def table_to_csv(self, filename: str, directory: str = None): """Creates or appends a CSV file with tree data from self.table_data""" check = extension_check(filename, '.csv') if directory: file = join(directory, check) else: file = join(getcwd(), check) if isfile(file): allow = 'a' start = 1 else: allow = 'w' 
start = 0 with open(file, allow, newline='') as csv_file: csv_write = writer(csv_file, dialect=excel) for i in self.table_data[start:]: csv_write.writerow(i) def table_to_excel(self, filename: str, directory: str = None): """Creates or appends an Excel file with tree data from self.table_data""" check = extension_check(filename, '.xlsx') if directory: file = join(directory, check) else: file = join(getcwd(), check) if isfile(file): wb = load_workbook(file) ws = wb.active for i in self.table_data[1:]: ws.append(i) wb.save(file) else: wb = Workbook() ws = wb.active for i in self.table_data: ws.append(i) wb.save(file) def _update_metrics(self, metric: str): """Updates stand metrics based on the metric entered in the argument, used internally""" metric_list = [plot[metric] for plot in self.plots] stats = self._get_stats(metric_list) setattr(self, metric, stats['mean']) setattr(self, f'{metric}_stats', stats) def _update_species(self, plot): """Re-runs stand conditions calculations and statistics, used internally""" update_after = ['qmd', 'vbar', 'avg_hgt', 'hdr'] if self.plot_count == 0: return else: for species in plot.species: if species not in self.species_gross: for attr in self.attrs: if attr == '_gross': getattr(self, f'species{attr}')[species] = {met: [] for met in self.metrics} else: getattr(self, f'species{attr}')[species] = {met: 0 for met in self.metrics} for key in plot.species[species]: if key not in update_after: self.species_gross[species][key].append(plot.species[species][key]) for species in self.species_gross: for key in self.species_gross[species]: if key not in update_after: data = self.species_gross[species][key] if len(data) < self.plot_count: data += ([0] * (self.plot_count - len(data))) stats = self._get_stats(data) self.species[species][key] = stats['mean'] self.species_stats[species][key] = stats self.species[species]['qmd'] = math.sqrt((self.species[species]['ba_ac'] / self.species[species]['tpa']) / 0.005454) self.species[species]['vbar'] = 
self.species[species]['bf_ac'] / self.species[species]['ba_ac'] if species == 'totals_all': self.species[species]['avg_hgt'] = mean([p.avg_hgt for p in self.plots]) self.species[species]['hdr'] = mean([p.hdr for p in self.plots]) else: trees = [] for p in self.plots: for t in p.trees: trees.append(t) self.species[species]['avg_hgt'] = mean([t.height for t in trees if t.species == species]) self.species[species]['hdr'] = mean([t.hdr for t in trees if t.species == species]) def _update_logs(self, plot): """Re-runs stand logs calculations, used internally""" if self.plot_count == 0: return else: subs = ['lpa', 'bf_ac', 'cf_ac'] for species in plot.logs: if species not in self.logs: self.logs[species] = {} for grade in plot.logs[species]: if grade not in self.logs[species]: self.logs[species][grade] = {rng: {sub: {'gross': [], 'mean': 0} for sub in subs} for rng in LOG_LENGTHS} self.logs[species][grade]['totals_by_grade'] = {sub: {'gross': [], 'mean': 0} for sub in subs} for rng in plot.logs[species][grade]: if rng != 'display': for sub in subs: self.logs[species][grade][rng][sub]['gross'].append(plot.logs[species][grade][rng][sub]) for species in self.logs: for grade in self.logs[species]: for rng in self.logs[species][grade]: for sub in subs: gross = self.logs[species][grade][rng][sub]['gross'] if len(gross) < self.plot_count: gross += ([0] * (self.plot_count - len(gross))) self.logs[species][grade][rng][sub]['mean'] = mean(gross) def _update_table_data(self): """Converts stand data to plot/tree inventory data table layout, used internally""" heads = ['Stand', 'Plot Number', 'Tree Number', 'Species', 'DBH', 'Height', 'Stump Height', 'Log 1 Length', 'Log 1 Grade', 'Log 1 Defect', 'Between Logs Feet'] master = [] max_logs = [] for i, plot in enumerate(self.plots): for j, tree in enumerate(plot.trees): temp = [self.name, i + 1, j + 1] for key in ['species', 'dbh', 'height']: temp.append(tree[key]) len_logs = len(tree.logs) max_logs.append(len_logs) for k, lnum in 
enumerate(tree.logs): log = tree.logs[lnum] if lnum == 1: temp.append(log.stem_height - log.length - 1) for lkey in ['length', 'grade', 'defect']: temp.append(log[lkey]) if k < len(tree.logs) - 1: between = tree.logs[lnum+1].stem_height - log.stem_height - tree.logs[lnum+1].length - 1 if between < 0: temp.append(0) else: temp.append(between) master.append(temp) heads += add_logs_to_table_heads(max(max_logs)) len_heads = len(heads) for i in master: len_i = len(i) if len_i < len_heads: i += ['' for j in range(len_heads - len_i)] master.insert(0, heads) return master def _update_summary_stand(self): """Updates the current stand conditions list of stand.summary_stand, used internally""" heads = ['SPECIES'] + [head[1] for head in SORTED_HEADS] body_data = [] for key in self.species: if key == 'totals_all': show = 'TOTALS' else: show = key temp = [str(show)] + [format_comma(self.species[key][i[0]]) for i in SORTED_HEADS] body_data.append(temp) body_data.append(body_data.pop(0)) body_data.insert(0, heads) return body_data def _update_summary_logs(self): """Updates the stand logs summary dict, data-tables are broken down by metric type --> species, used internally. 
Example: self.summary_logs['BOARD FEET PER ACRE']['DF'] --> data table""" table_data = {} tables = [['bf_ac', 'BOARD FEET PER ACRE'], ['cf_ac', 'CUBIC FEET PER ACRE'], ['lpa', 'LOGS PER ACRE']] for table in tables: metric_key = table[0] key = table[1] table_data[key] = {} for species in self.logs: if species == 'totals_all': show = 'TOTALS' else: show = ALL_SPECIES_NAMES[species] table_data[key][show] = [['LOG GRADES'] + [rng.upper() for rng in LOG_LENGTHS] + ['TOTALS']] grade_sort = [] for grade in self.logs[species]: values = [self.logs[species][grade][rng][metric_key]['mean'] for rng in self.logs[species][grade]] if sum(values) > 0: if grade == 'totals_by_length': col_text = 'TOTALS' else: col_text = grade grade_sort.append([col_text] + [format_comma(z) for z in values]) grade_sort = sorted(grade_sort, key=lambda x: GRADE_SORT[x[0]]) for g in grade_sort: table_data[key][show].append(g) table_data[key] = reorder_dict(table_data[key]) return table_data def _update_summary_stats(self): """Updates the stand statistics dict, stats-tables are broken down by species, used internally. 
Example: self.summary_stats['DF'] --> stats-table""" tables = {} for spp in self.species_stats: if spp == 'totals_all': show = 'TOTALS' else: show = ALL_SPECIES_NAMES[spp] tables[show] = [['METRIC'] + [head.upper() for head in self.species_stats[spp]['tpa'] if head != 'low_avg_high'] + ['LOW', 'AVERAGE', 'HIGH']] for key in self.species_stats[spp]: temp = [key.upper()] not_enough_data = False for sub in self.species_stats[spp][key]: x = self.species_stats[spp][key][sub] if not_enough_data: if x == 'Not enough data': if sub == 'low_avg_high': for i in range(3): temp.append('-') else: temp.append('-') else: if x == 'Not enough data': temp.append(x) not_enough_data = True else: if sub == 'low_avg_high': for i in x: temp.append(format_comma(i)) elif sub == 'stderr_pct': temp.append(format_pct(x)) else: temp.append(format_comma(x)) tables[show].append(temp) return reorder_dict(tables) def _get_stats(self, data): """Runs the statistical calculations on a set of the stand conditions data, returns an updated sub dict, used internally""" m = mean(data) if len(data) >= 2: std = stdev(data) ste = std / math.sqrt(self.plot_count) low_avg_high = [max(round(m - ste, 1), 0), m, m + ste] d = {'mean': m, 'variance': variance(data), 'stdev': std, 'stderr': ste, 'stderr_pct': (ste / m) * 100, 'low_avg_high': low_avg_high} else: d = {'mean': m, 'variance': 'Not enough data', 'stdev': 'Not enough data', 'stderr': 'Not enough data', 'stderr_pct': 'Not enough data', 'low_avg_high': 'Not enough data'} return d def _compile_report_text(self): """Compiles the console-formatted report of all stand data and stats, used internally""" n = '\n' * 4 console_text = f'{print_stand_species(self.summary_stand)}{n}' console_text += f'{print_stand_logs(self.summary_logs)}{n}' console_text += f'{print_stand_stats(self.summary_stats)}' return console_text def _compile_pdf_report(self): pdf = PDF() pdf.alias_nb_pages() pdf.add_page() pdf.compile_stand_report(self) return pdf if __name__ == '__main__': 
import argparse import traceback import sys from os import mkdir, getcwd from os.path import join, isfile, isdir, expanduser from treetopper._utils import get_desktop_path def make_dir_and_subdir(workflow_num): desktop = get_desktop_path() tt_dir = join(desktop, 'treetopper_outputs') if not isdir(tt_dir): mkdir(tt_dir) wf_dir = join(tt_dir, f'workflow_{workflow_num}') if not isdir(wf_dir): mkdir(wf_dir) return wf_dir def get_package_path(filename): path = None for i in sys.path: if 'AppData' in i and i[-13:] == 'site-packages': path = i break tt_path = join(path, 'treetopper') sheet_path = join(tt_path, 'example_csv_and_xlsx') final = join(sheet_path, filename) return final parser = argparse.ArgumentParser(description='treetopper Example Workflows') parser.add_argument('workflow_number', help='Enter the number of the workflow to run.\n Valid workflow numbers: 1, 2, 3, 4, 5, 6)') args = parser.parse_args() wf = args.workflow_number while True: if wf not in ['1', '2', '3', '4', '5', '6']: print('Please enter a workflow number 1, 2, 3, 4, 5, or 6') wf = input('Workflow #: ') else: break wf = int(wf) def workflow_1(workflow_number): stand = Stand('WF1', -20) plot_factor = stand.plot_factor tree_data = [ # Plot 1 [TimberQuick(plot_factor, 'DF', 29.5, 119), TimberQuick(plot_factor, 'WH', 18.9, 102), TimberQuick(plot_factor, 'WH', 20.2, 101), TimberQuick(plot_factor, 'WH', 19.9, 100), TimberQuick(plot_factor, 'DF', 20.6, 112)], # Plot 2 [TimberQuick(plot_factor, 'DF', 25.0, 117), TimberQuick(plot_factor, 'DF', 14.3, 105), TimberQuick(plot_factor, 'DF', 20.4, 119), TimberQuick(plot_factor, 'DF', 16.0, 108), TimberQuick(plot_factor, 'RC', 20.2, 124), TimberQuick(plot_factor, 'RC', 19.5, 116), TimberQuick(plot_factor, 'RC', 23.4, 121), TimberQuick(plot_factor, 'DF', 17.8, 116), TimberQuick(plot_factor, 'DF', 22.3, 125)] ] for trees in tree_data: plot = Plot() for tree in trees: plot.add_tree(tree) stand.add_plot(plot) path = make_dir_and_subdir(workflow_number) 
stand.console_report() stand.table_to_csv(join(path, 'example_csv_export.csv')) thin80tpa = ThinTPA(stand, 80) thin80tpa.console_report() end_message = """**WORKFLOW 1 created a QUICK CRUISE stand from manually entered tree data. It then ran a thinning scenario with a target density of 80 Trees per Acre considering all species and diameter ranges. Outputs: Stand console report in terminal [print(stand_class.console_report)] ^above^ Thinning console report in terminal [print(thin_class.console_report))] ^above^ Plot data .csv "example_csv_export.csv" in desktop/treetopper_outputs/workflow_1/ """ print(f'\n\n{end_message}') def workflow_2(workflow_number): stand = Stand('WF2', 33.3) plot_factor = stand.plot_factor tree_data = [ # Plot 1 [[TimberFull(plot_factor, 'DF', 29.5, 119), [[42, 40, 'S2', 5], [83, 40, 'S3', 0], [102, 18, 'S4', 10]]], [TimberFull(plot_factor, 'WH', 18.9, 102), [[42, 40, 'S2', 0], [79, 36, 'S4', 5]]], [TimberFull(plot_factor, 'WH', 20.2, 101), [[42, 40, 'S2', 5], [83, 40, 'S4', 0]]], [TimberFull(plot_factor, 'WH', 19.9, 100), [[42, 40, 'S2', 0], [83, 40, 'S4', 15]]], [TimberFull(plot_factor, 'DF', 20.6, 112), [[42, 40, 'S2', 0], [83, 40, 'S3', 5], [100, 16, 'UT', 10]]]], # Plot 2 [[TimberFull(plot_factor, 'DF', 25.0, 117), [[42, 40, 'SM', 0], [83, 40, 'S3', 5], [100, 16, 'S4', 0]]], [TimberFull(plot_factor, 'DF', 14.3, 105), [[42, 40, 'S3', 0], [79, 36, 'S4', 0]]], [TimberFull(plot_factor, 'DF', 20.4, 119), [[42, 40, 'S2', 5], [83, 40, 'S3', 5], [100, 16, 'S4', 5]]], [TimberFull(plot_factor, 'DF', 16.0, 108), [[42, 40, 'S3', 5], [83, 40, 'S3', 10]]], [TimberFull(plot_factor, 'RC', 20.2, 124), [[42, 40, 'CR', 5], [83, 40, 'CR', 5], [104, 20, 'CR', 5]]], [TimberFull(plot_factor, 'RC', 19.5, 116), [[42, 40, 'CR', 10], [83, 40, 'CR', 5], [100, 16, 'CR', 0]]], [TimberFull(plot_factor, 'RC', 23.4, 121), [[42, 40, 'CR', 0], [83, 40, 'CR', 0], [106, 22, 'CR', 5]]], [TimberFull(plot_factor, 'DF', 17.8, 116), [[42, 40, 'S2', 0], [83, 40, 'S3', 0], [100, 
16, 'S4', 10]]], [TimberFull(plot_factor, 'DF', 22.3, 125), [[42, 40, 'SM', 0], [83, 40, 'S3', 5], [108, 24, 'S4', 0]]]] ] for trees in tree_data: plot = Plot() for tree, logs in trees: for log in logs: tree.add_log(*log) plot.add_tree(tree) stand.add_plot(plot) path = make_dir_and_subdir(workflow_number) stand.console_report() stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) thin120ba = ThinBA(stand, 120, species_to_cut=['DF', 'WH']) thin120ba.console_report() end_message = """**WORKFLOW 2 created a FULL CRUISE stand from manually entered tree data. It then ran a thinning scenario with a target density of 120 Basal Area per Acre harvesting only DF and WH considering all diameter ranges. Outputs: Stand console report in terminal [print(stand_class.console_report)] ^above^ Thinning console report in terminal [print(thin_class.console_report))] ^above^ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_2/ """ print(f'\n\n{end_message}') def workflow_3(workflow_number): path = make_dir_and_subdir(workflow_number) stand = Stand('EX4', -30) stand.import_sheet_quick(get_package_path('Example_Excel_quick.xlsx')) stand.console_report() stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) thin25rd = ThinRD(stand, 25, species_to_cut=['DF', 'WH'], min_dbh_to_cut=10, max_dbh_to_cut=18) thin25rd.console_report() end_message = """**WORKFLOW 3 created a QUICK CRUISE stand from importing plot data from an excel sheet. It then ran a thinning scenario with a target density of 25 Relative Density per Acre harvesting only DF and WH, with a minimum dbh of 10 inches and a maximum dbh of 18 inches. 
** Note this thinning density won't be able to be achieved fully because our parameters don't allow for the needed harvest density, but this is to illustrate that the thinning will let the user know how much density was taken and how much more is needed to achieve the desired density target Outputs: Stand console report in terminal [print(stand_class.console_report)] ^above^ Thinning console report in terminal [print(thin_class.console_report))] ^above^ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_3/ """ print(f'\n\n{end_message}') def workflow_4(workflow_number): path = make_dir_and_subdir(workflow_number) stand = Stand('OK2', 46.94) stand.import_sheet_full(get_package_path('Example_CSV_full.csv')) stand.console_report() stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) try: thin100tpa = ThinTPA(stand, 100) thin100tpa.console_report() except TargetDensityError as e: print(traceback.format_exc()) end_message = """**WORKFLOW 4 created a FULL CRUISE stand from importing plot data from an csv sheet. It then ran a thinning scenario with a target density of 100 Trees per Acre considering all species and diameter ranges. ** Note this thinning density is greater than the current stand density and the Thin Class will throw a TargetDensityError exception which will explain what went wrong. 
Outputs: Stand console report in terminal [print(stand_class.console_report)] ^above^ Thinning console report in terminal [print(thin_class.console_report))] ^above^ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_4/ """ print(f'\n\n{end_message}') def workflow_5(workflow_number): path = make_dir_and_subdir(workflow_number) stand = Stand('EX3', 33.3) stand.import_sheet_quick(get_package_path('Example_CSV_quick.csv')) stand.pdf_report(join(path, 'stand_report.pdf')) stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) thin140ba = ThinBA(stand, 140, species_to_cut=['DF', 'WH', 'RA'], max_dbh_to_cut=24) thin140ba.pdf_report(join(path, 'thin_report.pdf')) end_message = """**WORKFLOW 5 created a QUICK CRUISE stand from importing plot data from an csv sheet. It then ran a thinning scenario with a target density of 140 Basal Area per Acre harvesting only DF, WH and RA with a maximum diameter of 24 inches. Outputs: Stand PDF report "stand_report.pdf" from [stand_class.pdf_report()] in desktop/treetopper_outputs/workflow_5/ Thinning PDF report "thin_report.pdf" from [thin_class.pdf_report()] in desktop/treetopper_outputs/workflow_5/ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_5/ """ print(f'\n\n{end_message}') def workflow_6(workflow_number): path = make_dir_and_subdir(workflow_number) stand = Stand('OK1', -30) stand.import_sheet_full(get_package_path('Example_Excel_full.xlsx')) stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) fvs = FVS() fvs.set_stand(stand, 'PN', 612, 6, 45, 'DF', 110) fvs.access_db('access_db', directory=path) fvs.sqlite_db('sqlite_db', directory=path) fvs.excel_db('excel_db', directory=path) end_message = """**WORKFLOW 6 created a FULL CRUISE stand from importing plot data from an excel sheet. It then ran the FVS module to create FVS formatted databases from the stand data. FVS is the US Forest Service's Forest Vegetation Simulator. 
Outputs: FVS Access database "access_db.db" from [fvs_class.access_db()] in desktop/treetopper_outputs/workflow_6/ FVS Suppose file "Suppose.loc" in desktop/treetopper_outputs/workflow_6/. ** FVS Legacy needs a .loc file along with the database. FVS SQLite database "sqlite_db.db" from [fvs_class.sqlite_db()] in desktop/treetopper_outputs/workflow_6/ FVS Excel database "excel_db.db" from [fvs_class.excel_db()] in desktop/treetopper_outputs/workflow_6/ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_6/ """ print(f'\n\n{end_message}') def main(workflow_number): opts = { 1: workflow_1, 2: workflow_2, 3: workflow_3, 4: workflow_4, 5: workflow_5, 6: workflow_6 } opts[workflow_number](workflow_number) print(f"\n\n{'-' * 200}\n\n") main(wf) print(f"\n\n{'-' * 200}\n\n")
41.46794
150
0.565469
from os import ( startfile, getcwd ) from os.path import join from io import BytesIO from csv import ( writer, excel ) from openpyxl import ( Workbook, load_workbook ) from statistics import ( mean, variance, stdev ) from treetopper.plot import Plot from treetopper.timber import ( TimberQuick, TimberFull ) from treetopper.log import Log from treetopper.thin import ( ThinTPA, ThinBA, ThinRD ) from treetopper._exceptions import TargetDensityError from treetopper.fvs import FVS from treetopper._constants import ( math, ALL_SPECIES_NAMES, GRADE_SORT, LOG_LENGTHS, SORTED_HEADS ) from treetopper._utils import ( format_comma, format_pct, extension_check, reorder_dict, check_date, add_logs_to_table_heads ) from treetopper._import_from_sheets import import_from_sheet from treetopper._print_console import ( print_stand_species, print_stand_logs, print_stand_stats ) from treetopper._print_pdf import PDF class Stand(object): def __init__(self, name: str, plot_factor: float, acres: float = None, inventory_date: str = None): self.name = name.upper() self.plot_factor = plot_factor self.plots = [] self.plot_count = 0 self.tpa = 0 self.ba_ac = 0 self.qmd = 0 self.rd_ac = 0 self.bf_ac = 0 self.cf_ac = 0 self.avg_hgt = 0 self.hdr = 0 self.vbar = 0 self.tpa_stats = {} self.ba_ac_stats = {} self.rd_ac_stats = {} self.bf_ac_stats = {} self.cf_ac_stats = {} self.species = {} self.species_gross = {} self.species_stats = {} self.logs = {} self.table_data = [] self.summary_stand = [] self.summary_logs = {} self.summary_stats = [] self.metrics = ['tpa', 'ba_ac', 'rd_ac', 'bf_ac', 'cf_ac'] self.attrs = ['_gross', '_stats', ''] self.acres = acres if inventory_date: self.inv_date = check_date(inventory_date) else: self.inv_date = inventory_date def __getitem__(self, attribute: str): return self.__dict__[attribute] def get_stand_table_text(self): return print_stand_species(self.summary_stand) def get_logs_table_text(self): return print_stand_logs(self.summary_logs) def 
get_stats_table_text(self): return print_stand_stats(self.summary_stats) def get_console_report_text(self): return self._compile_report_text() def console_report(self): print(self._compile_report_text()) def get_pdf_report_bytes_io(self): pdf = self._compile_pdf_report() return BytesIO(pdf.output(dest='S').encode('latin-1')) def pdf_report(self, filename: str, directory: str = None, start_file_upon_creation: bool = False): check = extension_check(filename, '.pdf') if directory: file = join(directory, check) else: file = join(getcwd(), check) pdf = self._compile_pdf_report() pdf.output(file, 'F') if start_file_upon_creation: startfile(file) def add_plot(self, plot: Plot): self.plots.append(plot) self.plot_count += 1 for met in self.metrics: self._update_metrics(met) self.qmd = math.sqrt((self.ba_ac / self.tpa) / .005454) self.vbar = self.bf_ac / self.ba_ac self._update_species(plot) self._update_logs(plot) self.table_data = self._update_table_data() self.summary_stand = self._update_summary_stand() self.summary_logs = self._update_summary_logs() self.summary_stats = self._update_summary_stats() def import_sheet_quick(self, file_path: str): plots = import_from_sheet(file_path, self.name, 'q') for plot_num in plots: plot = Plot() for tree in plots[plot_num]: plot.add_tree(TimberQuick(self.plot_factor, *tree)) self.add_plot(plot) def import_sheet_full(self, file_path: str): plots = import_from_sheet(file_path, self.name, 'f') for plot_num in plots: plot = Plot() for tree_data in plots[plot_num]: args = tree_data[: -1] logs = tree_data[-1] tree = TimberFull(self.plot_factor, *args) for log in logs: tree.add_log(*log) plot.add_tree(tree) self.add_plot(plot) def table_to_csv(self, filename: str, directory: str = None): check = extension_check(filename, '.csv') if directory: file = join(directory, check) else: file = join(getcwd(), check) if isfile(file): allow = 'a' start = 1 else: allow = 'w' start = 0 with open(file, allow, newline='') as csv_file: csv_write = 
writer(csv_file, dialect=excel) for i in self.table_data[start:]: csv_write.writerow(i) def table_to_excel(self, filename: str, directory: str = None): check = extension_check(filename, '.xlsx') if directory: file = join(directory, check) else: file = join(getcwd(), check) if isfile(file): wb = load_workbook(file) ws = wb.active for i in self.table_data[1:]: ws.append(i) wb.save(file) else: wb = Workbook() ws = wb.active for i in self.table_data: ws.append(i) wb.save(file) def _update_metrics(self, metric: str): metric_list = [plot[metric] for plot in self.plots] stats = self._get_stats(metric_list) setattr(self, metric, stats['mean']) setattr(self, f'{metric}_stats', stats) def _update_species(self, plot): update_after = ['qmd', 'vbar', 'avg_hgt', 'hdr'] if self.plot_count == 0: return else: for species in plot.species: if species not in self.species_gross: for attr in self.attrs: if attr == '_gross': getattr(self, f'species{attr}')[species] = {met: [] for met in self.metrics} else: getattr(self, f'species{attr}')[species] = {met: 0 for met in self.metrics} for key in plot.species[species]: if key not in update_after: self.species_gross[species][key].append(plot.species[species][key]) for species in self.species_gross: for key in self.species_gross[species]: if key not in update_after: data = self.species_gross[species][key] if len(data) < self.plot_count: data += ([0] * (self.plot_count - len(data))) stats = self._get_stats(data) self.species[species][key] = stats['mean'] self.species_stats[species][key] = stats self.species[species]['qmd'] = math.sqrt((self.species[species]['ba_ac'] / self.species[species]['tpa']) / 0.005454) self.species[species]['vbar'] = self.species[species]['bf_ac'] / self.species[species]['ba_ac'] if species == 'totals_all': self.species[species]['avg_hgt'] = mean([p.avg_hgt for p in self.plots]) self.species[species]['hdr'] = mean([p.hdr for p in self.plots]) else: trees = [] for p in self.plots: for t in p.trees: trees.append(t) 
self.species[species]['avg_hgt'] = mean([t.height for t in trees if t.species == species]) self.species[species]['hdr'] = mean([t.hdr for t in trees if t.species == species]) def _update_logs(self, plot): if self.plot_count == 0: return else: subs = ['lpa', 'bf_ac', 'cf_ac'] for species in plot.logs: if species not in self.logs: self.logs[species] = {} for grade in plot.logs[species]: if grade not in self.logs[species]: self.logs[species][grade] = {rng: {sub: {'gross': [], 'mean': 0} for sub in subs} for rng in LOG_LENGTHS} self.logs[species][grade]['totals_by_grade'] = {sub: {'gross': [], 'mean': 0} for sub in subs} for rng in plot.logs[species][grade]: if rng != 'display': for sub in subs: self.logs[species][grade][rng][sub]['gross'].append(plot.logs[species][grade][rng][sub]) for species in self.logs: for grade in self.logs[species]: for rng in self.logs[species][grade]: for sub in subs: gross = self.logs[species][grade][rng][sub]['gross'] if len(gross) < self.plot_count: gross += ([0] * (self.plot_count - len(gross))) self.logs[species][grade][rng][sub]['mean'] = mean(gross) def _update_table_data(self): heads = ['Stand', 'Plot Number', 'Tree Number', 'Species', 'DBH', 'Height', 'Stump Height', 'Log 1 Length', 'Log 1 Grade', 'Log 1 Defect', 'Between Logs Feet'] master = [] max_logs = [] for i, plot in enumerate(self.plots): for j, tree in enumerate(plot.trees): temp = [self.name, i + 1, j + 1] for key in ['species', 'dbh', 'height']: temp.append(tree[key]) len_logs = len(tree.logs) max_logs.append(len_logs) for k, lnum in enumerate(tree.logs): log = tree.logs[lnum] if lnum == 1: temp.append(log.stem_height - log.length - 1) for lkey in ['length', 'grade', 'defect']: temp.append(log[lkey]) if k < len(tree.logs) - 1: between = tree.logs[lnum+1].stem_height - log.stem_height - tree.logs[lnum+1].length - 1 if between < 0: temp.append(0) else: temp.append(between) master.append(temp) heads += add_logs_to_table_heads(max(max_logs)) len_heads = len(heads) for i in 
master: len_i = len(i) if len_i < len_heads: i += ['' for j in range(len_heads - len_i)] master.insert(0, heads) return master def _update_summary_stand(self): heads = ['SPECIES'] + [head[1] for head in SORTED_HEADS] body_data = [] for key in self.species: if key == 'totals_all': show = 'TOTALS' else: show = key temp = [str(show)] + [format_comma(self.species[key][i[0]]) for i in SORTED_HEADS] body_data.append(temp) body_data.append(body_data.pop(0)) body_data.insert(0, heads) return body_data def _update_summary_logs(self): table_data = {} tables = [['bf_ac', 'BOARD FEET PER ACRE'], ['cf_ac', 'CUBIC FEET PER ACRE'], ['lpa', 'LOGS PER ACRE']] for table in tables: metric_key = table[0] key = table[1] table_data[key] = {} for species in self.logs: if species == 'totals_all': show = 'TOTALS' else: show = ALL_SPECIES_NAMES[species] table_data[key][show] = [['LOG GRADES'] + [rng.upper() for rng in LOG_LENGTHS] + ['TOTALS']] grade_sort = [] for grade in self.logs[species]: values = [self.logs[species][grade][rng][metric_key]['mean'] for rng in self.logs[species][grade]] if sum(values) > 0: if grade == 'totals_by_length': col_text = 'TOTALS' else: col_text = grade grade_sort.append([col_text] + [format_comma(z) for z in values]) grade_sort = sorted(grade_sort, key=lambda x: GRADE_SORT[x[0]]) for g in grade_sort: table_data[key][show].append(g) table_data[key] = reorder_dict(table_data[key]) return table_data def _update_summary_stats(self): tables = {} for spp in self.species_stats: if spp == 'totals_all': show = 'TOTALS' else: show = ALL_SPECIES_NAMES[spp] tables[show] = [['METRIC'] + [head.upper() for head in self.species_stats[spp]['tpa'] if head != 'low_avg_high'] + ['LOW', 'AVERAGE', 'HIGH']] for key in self.species_stats[spp]: temp = [key.upper()] not_enough_data = False for sub in self.species_stats[spp][key]: x = self.species_stats[spp][key][sub] if not_enough_data: if x == 'Not enough data': if sub == 'low_avg_high': for i in range(3): temp.append('-') else: 
temp.append('-') else: if x == 'Not enough data': temp.append(x) not_enough_data = True else: if sub == 'low_avg_high': for i in x: temp.append(format_comma(i)) elif sub == 'stderr_pct': temp.append(format_pct(x)) else: temp.append(format_comma(x)) tables[show].append(temp) return reorder_dict(tables) def _get_stats(self, data): m = mean(data) if len(data) >= 2: std = stdev(data) ste = std / math.sqrt(self.plot_count) low_avg_high = [max(round(m - ste, 1), 0), m, m + ste] d = {'mean': m, 'variance': variance(data), 'stdev': std, 'stderr': ste, 'stderr_pct': (ste / m) * 100, 'low_avg_high': low_avg_high} else: d = {'mean': m, 'variance': 'Not enough data', 'stdev': 'Not enough data', 'stderr': 'Not enough data', 'stderr_pct': 'Not enough data', 'low_avg_high': 'Not enough data'} return d def _compile_report_text(self): n = '\n' * 4 console_text = f'{print_stand_species(self.summary_stand)}{n}' console_text += f'{print_stand_logs(self.summary_logs)}{n}' console_text += f'{print_stand_stats(self.summary_stats)}' return console_text def _compile_pdf_report(self): pdf = PDF() pdf.alias_nb_pages() pdf.add_page() pdf.compile_stand_report(self) return pdf if __name__ == '__main__': import argparse import traceback import sys from os import mkdir, getcwd from os.path import join, isfile, isdir, expanduser from treetopper._utils import get_desktop_path def make_dir_and_subdir(workflow_num): desktop = get_desktop_path() tt_dir = join(desktop, 'treetopper_outputs') if not isdir(tt_dir): mkdir(tt_dir) wf_dir = join(tt_dir, f'workflow_{workflow_num}') if not isdir(wf_dir): mkdir(wf_dir) return wf_dir def get_package_path(filename): path = None for i in sys.path: if 'AppData' in i and i[-13:] == 'site-packages': path = i break tt_path = join(path, 'treetopper') sheet_path = join(tt_path, 'example_csv_and_xlsx') final = join(sheet_path, filename) return final parser = argparse.ArgumentParser(description='treetopper Example Workflows') parser.add_argument('workflow_number', 
help='Enter the number of the workflow to run.\n Valid workflow numbers: 1, 2, 3, 4, 5, 6)') args = parser.parse_args() wf = args.workflow_number while True: if wf not in ['1', '2', '3', '4', '5', '6']: print('Please enter a workflow number 1, 2, 3, 4, 5, or 6') wf = input('Workflow #: ') else: break wf = int(wf) def workflow_1(workflow_number): stand = Stand('WF1', -20) plot_factor = stand.plot_factor tree_data = [ [TimberQuick(plot_factor, 'DF', 29.5, 119), TimberQuick(plot_factor, 'WH', 18.9, 102), TimberQuick(plot_factor, 'WH', 20.2, 101), TimberQuick(plot_factor, 'WH', 19.9, 100), TimberQuick(plot_factor, 'DF', 20.6, 112)], [TimberQuick(plot_factor, 'DF', 25.0, 117), TimberQuick(plot_factor, 'DF', 14.3, 105), TimberQuick(plot_factor, 'DF', 20.4, 119), TimberQuick(plot_factor, 'DF', 16.0, 108), TimberQuick(plot_factor, 'RC', 20.2, 124), TimberQuick(plot_factor, 'RC', 19.5, 116), TimberQuick(plot_factor, 'RC', 23.4, 121), TimberQuick(plot_factor, 'DF', 17.8, 116), TimberQuick(plot_factor, 'DF', 22.3, 125)] ] for trees in tree_data: plot = Plot() for tree in trees: plot.add_tree(tree) stand.add_plot(plot) path = make_dir_and_subdir(workflow_number) stand.console_report() stand.table_to_csv(join(path, 'example_csv_export.csv')) thin80tpa = ThinTPA(stand, 80) thin80tpa.console_report() end_message = """**WORKFLOW 1 created a QUICK CRUISE stand from manually entered tree data. It then ran a thinning scenario with a target density of 80 Trees per Acre considering all species and diameter ranges. 
Outputs: Stand console report in terminal [print(stand_class.console_report)] ^above^ Thinning console report in terminal [print(thin_class.console_report))] ^above^ Plot data .csv "example_csv_export.csv" in desktop/treetopper_outputs/workflow_1/ """ print(f'\n\n{end_message}') def workflow_2(workflow_number): stand = Stand('WF2', 33.3) plot_factor = stand.plot_factor tree_data = [ [[TimberFull(plot_factor, 'DF', 29.5, 119), [[42, 40, 'S2', 5], [83, 40, 'S3', 0], [102, 18, 'S4', 10]]], [TimberFull(plot_factor, 'WH', 18.9, 102), [[42, 40, 'S2', 0], [79, 36, 'S4', 5]]], [TimberFull(plot_factor, 'WH', 20.2, 101), [[42, 40, 'S2', 5], [83, 40, 'S4', 0]]], [TimberFull(plot_factor, 'WH', 19.9, 100), [[42, 40, 'S2', 0], [83, 40, 'S4', 15]]], [TimberFull(plot_factor, 'DF', 20.6, 112), [[42, 40, 'S2', 0], [83, 40, 'S3', 5], [100, 16, 'UT', 10]]]], [[TimberFull(plot_factor, 'DF', 25.0, 117), [[42, 40, 'SM', 0], [83, 40, 'S3', 5], [100, 16, 'S4', 0]]], [TimberFull(plot_factor, 'DF', 14.3, 105), [[42, 40, 'S3', 0], [79, 36, 'S4', 0]]], [TimberFull(plot_factor, 'DF', 20.4, 119), [[42, 40, 'S2', 5], [83, 40, 'S3', 5], [100, 16, 'S4', 5]]], [TimberFull(plot_factor, 'DF', 16.0, 108), [[42, 40, 'S3', 5], [83, 40, 'S3', 10]]], [TimberFull(plot_factor, 'RC', 20.2, 124), [[42, 40, 'CR', 5], [83, 40, 'CR', 5], [104, 20, 'CR', 5]]], [TimberFull(plot_factor, 'RC', 19.5, 116), [[42, 40, 'CR', 10], [83, 40, 'CR', 5], [100, 16, 'CR', 0]]], [TimberFull(plot_factor, 'RC', 23.4, 121), [[42, 40, 'CR', 0], [83, 40, 'CR', 0], [106, 22, 'CR', 5]]], [TimberFull(plot_factor, 'DF', 17.8, 116), [[42, 40, 'S2', 0], [83, 40, 'S3', 0], [100, 16, 'S4', 10]]], [TimberFull(plot_factor, 'DF', 22.3, 125), [[42, 40, 'SM', 0], [83, 40, 'S3', 5], [108, 24, 'S4', 0]]]] ] for trees in tree_data: plot = Plot() for tree, logs in trees: for log in logs: tree.add_log(*log) plot.add_tree(tree) stand.add_plot(plot) path = make_dir_and_subdir(workflow_number) stand.console_report() stand.table_to_excel(join(path, 
'example_xlsx_export.xlsx')) thin120ba = ThinBA(stand, 120, species_to_cut=['DF', 'WH']) thin120ba.console_report() end_message = """**WORKFLOW 2 created a FULL CRUISE stand from manually entered tree data. It then ran a thinning scenario with a target density of 120 Basal Area per Acre harvesting only DF and WH considering all diameter ranges. Outputs: Stand console report in terminal [print(stand_class.console_report)] ^above^ Thinning console report in terminal [print(thin_class.console_report))] ^above^ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_2/ """ print(f'\n\n{end_message}') def workflow_3(workflow_number): path = make_dir_and_subdir(workflow_number) stand = Stand('EX4', -30) stand.import_sheet_quick(get_package_path('Example_Excel_quick.xlsx')) stand.console_report() stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) thin25rd = ThinRD(stand, 25, species_to_cut=['DF', 'WH'], min_dbh_to_cut=10, max_dbh_to_cut=18) thin25rd.console_report() end_message = """**WORKFLOW 3 created a QUICK CRUISE stand from importing plot data from an excel sheet. It then ran a thinning scenario with a target density of 25 Relative Density per Acre harvesting only DF and WH, with a minimum dbh of 10 inches and a maximum dbh of 18 inches. 
** Note this thinning density won't be able to be achieved fully because our parameters don't allow for the needed harvest density, but this is to illustrate that the thinning will let the user know how much density was taken and how much more is needed to achieve the desired density target Outputs: Stand console report in terminal [print(stand_class.console_report)] ^above^ Thinning console report in terminal [print(thin_class.console_report))] ^above^ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_3/ """ print(f'\n\n{end_message}') def workflow_4(workflow_number): path = make_dir_and_subdir(workflow_number) stand = Stand('OK2', 46.94) stand.import_sheet_full(get_package_path('Example_CSV_full.csv')) stand.console_report() stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) try: thin100tpa = ThinTPA(stand, 100) thin100tpa.console_report() except TargetDensityError as e: print(traceback.format_exc()) end_message = """**WORKFLOW 4 created a FULL CRUISE stand from importing plot data from an csv sheet. It then ran a thinning scenario with a target density of 100 Trees per Acre considering all species and diameter ranges. ** Note this thinning density is greater than the current stand density and the Thin Class will throw a TargetDensityError exception which will explain what went wrong. 
Outputs: Stand console report in terminal [print(stand_class.console_report)] ^above^ Thinning console report in terminal [print(thin_class.console_report))] ^above^ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_4/ """ print(f'\n\n{end_message}') def workflow_5(workflow_number): path = make_dir_and_subdir(workflow_number) stand = Stand('EX3', 33.3) stand.import_sheet_quick(get_package_path('Example_CSV_quick.csv')) stand.pdf_report(join(path, 'stand_report.pdf')) stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) thin140ba = ThinBA(stand, 140, species_to_cut=['DF', 'WH', 'RA'], max_dbh_to_cut=24) thin140ba.pdf_report(join(path, 'thin_report.pdf')) end_message = """**WORKFLOW 5 created a QUICK CRUISE stand from importing plot data from an csv sheet. It then ran a thinning scenario with a target density of 140 Basal Area per Acre harvesting only DF, WH and RA with a maximum diameter of 24 inches. Outputs: Stand PDF report "stand_report.pdf" from [stand_class.pdf_report()] in desktop/treetopper_outputs/workflow_5/ Thinning PDF report "thin_report.pdf" from [thin_class.pdf_report()] in desktop/treetopper_outputs/workflow_5/ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_5/ """ print(f'\n\n{end_message}') def workflow_6(workflow_number): path = make_dir_and_subdir(workflow_number) stand = Stand('OK1', -30) stand.import_sheet_full(get_package_path('Example_Excel_full.xlsx')) stand.table_to_excel(join(path, 'example_xlsx_export.xlsx')) fvs = FVS() fvs.set_stand(stand, 'PN', 612, 6, 45, 'DF', 110) fvs.access_db('access_db', directory=path) fvs.sqlite_db('sqlite_db', directory=path) fvs.excel_db('excel_db', directory=path) end_message = """**WORKFLOW 6 created a FULL CRUISE stand from importing plot data from an excel sheet. It then ran the FVS module to create FVS formatted databases from the stand data. FVS is the US Forest Service's Forest Vegetation Simulator. 
Outputs: FVS Access database "access_db.db" from [fvs_class.access_db()] in desktop/treetopper_outputs/workflow_6/ FVS Suppose file "Suppose.loc" in desktop/treetopper_outputs/workflow_6/. ** FVS Legacy needs a .loc file along with the database. FVS SQLite database "sqlite_db.db" from [fvs_class.sqlite_db()] in desktop/treetopper_outputs/workflow_6/ FVS Excel database "excel_db.db" from [fvs_class.excel_db()] in desktop/treetopper_outputs/workflow_6/ Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_6/ """ print(f'\n\n{end_message}') def main(workflow_number): opts = { 1: workflow_1, 2: workflow_2, 3: workflow_3, 4: workflow_4, 5: workflow_5, 6: workflow_6 } opts[workflow_number](workflow_number) print(f"\n\n{'-' * 200}\n\n") main(wf) print(f"\n\n{'-' * 200}\n\n")
true
true
f703ee65ebc49d049639276ee2bcc8f8f67095eb
992
py
Python
pyclopedia/p01_beginner/p03_data_structure/p02_list/p02_slice_operator.py
MacHu-GWU/pyclopedia-project
c6ee156eb40bc5a4ac5f51aa735b6fd004cb68ee
[ "MIT" ]
null
null
null
pyclopedia/p01_beginner/p03_data_structure/p02_list/p02_slice_operator.py
MacHu-GWU/pyclopedia-project
c6ee156eb40bc5a4ac5f51aa735b6fd004cb68ee
[ "MIT" ]
null
null
null
pyclopedia/p01_beginner/p03_data_structure/p02_list/p02_slice_operator.py
MacHu-GWU/pyclopedia-project
c6ee156eb40bc5a4ac5f51aa735b6fd004cb68ee
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- def example1(): """Slice operator. seq[::stride] # [seq[0], seq[stride], ..., seq[-1] ] seq[low::stride] # [seq[low], seq[low+stride], ..., seq[-1] ] seq[:high:stride] # [seq[0], seq[stride], ..., seq[high-1]] seq[low:high:stride] # [seq[low], seq[low+stride], ..., seq[high-1]] """ l = list("01234567") assert l[::2] == list("0246") # 从 index(0) 开始, 隔2个取一个 assert l[1::2] == list("1357") # 从 index(1) 开始, 隔2个取一个 assert l[:4:2] == list("02") # 从头开始到 index(4-1) 为止,隔2个取一个 assert l[2:6:2] == list("24") # 从index(2)开始到index(6-1)为止,隔2个取一个 example1() def example2(): """Reversed slice operator """ l = list("01234567") assert l[::-1] == list("76543210") # 从最后一个开始,逆序排列 assert l[::-2] == list("7531") # 从最后一个开始,隔2个取一个 assert l[-2::-2] == list("6420") # 从-2开始,隔2个取一个 assert l[:3:-2] == list("75") # 从最后开始,到3为止,隔2个取一个 example2()
29.176471
73
0.519153
def example1(): l = list("01234567") assert l[::2] == list("0246") assert l[1::2] == list("1357") assert l[:4:2] == list("02") assert l[2:6:2] == list("24") example1() def example2(): l = list("01234567") assert l[::-1] == list("76543210") assert l[::-2] == list("7531") assert l[-2::-2] == list("6420") assert l[:3:-2] == list("75") example2()
true
true
f703ee7b1e643155e7210026f57c5e7574579547
12,192
py
Python
salt/utils/dockermod/__init__.py
markgras/salt
d66cd3c935533c63870b83228b978ce43e0ef70d
[ "Apache-2.0" ]
9,425
2015-01-01T05:59:24.000Z
2022-03-31T20:44:05.000Z
salt/utils/dockermod/__init__.py
markgras/salt
d66cd3c935533c63870b83228b978ce43e0ef70d
[ "Apache-2.0" ]
33,507
2015-01-01T00:19:56.000Z
2022-03-31T23:48:20.000Z
salt/utils/dockermod/__init__.py
markgras/salt
d66cd3c935533c63870b83228b978ce43e0ef70d
[ "Apache-2.0" ]
5,810
2015-01-01T19:11:45.000Z
2022-03-31T02:37:20.000Z
""" Common logic used by the docker state and execution module This module contains logic to accommodate docker/salt CLI usage, as well as input as formatted by states. """ import copy import logging import salt.utils.args import salt.utils.data import salt.utils.dockermod.translate from salt.exceptions import CommandExecutionError, SaltInvocationError from salt.utils.args import get_function_argspec as _argspec from salt.utils.dockermod.translate.helpers import split as _split try: import docker except ImportError: docker = None # These next two imports are only necessary to have access to the needed # functions so that we can get argspecs for the container config, host config, # and networking config (see the get_client_args() function). try: import docker.types except ImportError: pass try: import docker.utils except ImportError: pass NOTSET = object() __virtualname__ = "docker" # Default timeout as of docker-py 1.0.0 CLIENT_TIMEOUT = 60 # Timeout for stopping the container, before a kill is invoked SHUTDOWN_TIMEOUT = 10 log = logging.getLogger(__name__) def __virtual__(): if docker is None: return False return __virtualname__ def get_client_args(limit=None): if docker is None: raise CommandExecutionError("docker Python module not imported") limit = salt.utils.args.split_input(limit or []) ret = {} if not limit or any( x in limit for x in ("create_container", "host_config", "connect_container_to_network") ): try: ret["create_container"] = _argspec(docker.APIClient.create_container).args except AttributeError: try: ret["create_container"] = _argspec(docker.Client.create_container).args except AttributeError: raise CommandExecutionError("Coult not get create_container argspec") try: ret["host_config"] = _argspec(docker.types.HostConfig.__init__).args except AttributeError: try: ret["host_config"] = _argspec(docker.utils.create_host_config).args except AttributeError: raise CommandExecutionError("Could not get create_host_config argspec") try: 
ret["connect_container_to_network"] = _argspec( docker.types.EndpointConfig.__init__ ).args except AttributeError: try: ret["connect_container_to_network"] = _argspec( docker.utils.utils.create_endpoint_config ).args except AttributeError: try: ret["connect_container_to_network"] = _argspec( docker.utils.create_endpoint_config ).args except AttributeError: raise CommandExecutionError( "Could not get connect_container_to_network argspec" ) for key, wrapped_func in ( ("logs", docker.api.container.ContainerApiMixin.logs), ("create_network", docker.api.network.NetworkApiMixin.create_network), ): if not limit or key in limit: try: func_ref = wrapped_func try: # functools.wraps makes things a little easier in Python 3 ret[key] = _argspec(func_ref.__wrapped__).args except AttributeError: # functools.wraps changed (unlikely), bail out ret[key] = [] except AttributeError: # Function moved, bail out ret[key] = [] if not limit or "ipam_config" in limit: try: ret["ipam_config"] = _argspec(docker.types.IPAMPool.__init__).args except AttributeError: try: ret["ipam_config"] = _argspec(docker.utils.create_ipam_pool).args except AttributeError: raise CommandExecutionError("Could not get ipam args") for item in ret: # The API version is passed automagically by the API code that imports # these classes/functions and is not an arg that we will be passing, so # remove it if present. Similarly, don't include "self" if it shows up # in the arglist. for argname in ("version", "self"): try: ret[item].remove(argname) except ValueError: pass # Remove any args in host or endpoint config from the create_container # arglist. This keeps us from accidentally allowing args that docker-py has # moved from the create_container function to the either the host or # endpoint config. 
for item in ("host_config", "connect_container_to_network"): for val in ret.get(item, []): try: ret["create_container"].remove(val) except ValueError: # Arg is not in create_container arglist pass for item in ("create_container", "host_config", "connect_container_to_network"): if limit and item not in limit: ret.pop(item, None) try: ret["logs"].remove("container") except (KeyError, ValueError, TypeError): pass return ret def translate_input( translator, skip_translate=None, ignore_collisions=False, validate_ip_addrs=True, **kwargs ): """ Translate CLI/SLS input into the format the API expects. The ``translator`` argument must be a module containing translation functions, within salt.utils.dockermod.translate. A ``skip_translate`` kwarg can be passed to control which arguments are translated. It can be either a comma-separated list or an iterable containing strings (e.g. a list or tuple), and members of that tuple will have their translation skipped. Optionally, skip_translate can be set to True to skip *all* translation. """ kwargs = copy.deepcopy(salt.utils.args.clean_kwargs(**kwargs)) invalid = {} collisions = [] if skip_translate is True: # Skip all translation return kwargs else: if not skip_translate: skip_translate = () else: try: skip_translate = _split(skip_translate) except AttributeError: pass if not hasattr(skip_translate, "__iter__"): log.error("skip_translate is not an iterable, ignoring") skip_translate = () try: # Using list(kwargs) here because if there are any invalid arguments we # will be popping them from the kwargs. for key in list(kwargs): real_key = translator.ALIASES.get(key, key) if real_key in skip_translate: continue # ipam_pools is designed to be passed as a list of actual # dictionaries, but if each of the dictionaries passed has a single # element, it will be incorrectly repacked. 
if key != "ipam_pools" and salt.utils.data.is_dictlist(kwargs[key]): kwargs[key] = salt.utils.data.repack_dictlist(kwargs[key]) try: kwargs[key] = getattr(translator, real_key)( kwargs[key], validate_ip_addrs=validate_ip_addrs, skip_translate=skip_translate, ) except AttributeError: log.debug("No translation function for argument '%s'", key) continue except SaltInvocationError as exc: kwargs.pop(key) invalid[key] = exc.strerror try: translator._merge_keys(kwargs) except AttributeError: pass # Convert CLI versions of commands to their docker-py counterparts for key in translator.ALIASES: if key in kwargs: new_key = translator.ALIASES[key] value = kwargs.pop(key) if new_key in kwargs: collisions.append(new_key) else: kwargs[new_key] = value try: translator._post_processing(kwargs, skip_translate, invalid) except AttributeError: pass except Exception as exc: # pylint: disable=broad-except error_message = exc.__str__() log.error("Error translating input: '%s'", error_message, exc_info=True) else: error_message = None error_data = {} if error_message is not None: error_data["error_message"] = error_message if invalid: error_data["invalid"] = invalid if collisions and not ignore_collisions: for item in collisions: error_data.setdefault("collisions", []).append( "'{}' is an alias for '{}', they cannot both be used".format( translator.ALIASES_REVMAP[item], item ) ) if error_data: raise CommandExecutionError("Failed to translate input", info=error_data) return kwargs def create_ipam_config(*pools, **kwargs): """ Builds an IP address management (IPAM) config dictionary """ kwargs = salt.utils.args.clean_kwargs(**kwargs) try: # docker-py 2.0 and newer pool_args = salt.utils.args.get_function_argspec( docker.types.IPAMPool.__init__ ).args create_pool = docker.types.IPAMPool create_config = docker.types.IPAMConfig except AttributeError: # docker-py < 2.0 pool_args = salt.utils.args.get_function_argspec( docker.utils.create_ipam_pool ).args create_pool = 
docker.utils.create_ipam_pool create_config = docker.utils.create_ipam_config for primary_key, alias_key in (("driver", "ipam_driver"), ("options", "ipam_opts")): if alias_key in kwargs: alias_val = kwargs.pop(alias_key) if primary_key in kwargs: log.warning( "docker.create_ipam_config: Both '%s' and '%s' " "passed. Ignoring '%s'", alias_key, primary_key, alias_key, ) else: kwargs[primary_key] = alias_val if salt.utils.data.is_dictlist(kwargs.get("options")): kwargs["options"] = salt.utils.data.repack_dictlist(kwargs["options"]) # Get all of the IPAM pool args that were passed as individual kwargs # instead of in the *pools tuple pool_kwargs = {} for key in list(kwargs): if key in pool_args: pool_kwargs[key] = kwargs.pop(key) pool_configs = [] if pool_kwargs: pool_configs.append(create_pool(**pool_kwargs)) pool_configs.extend([create_pool(**pool) for pool in pools]) if pool_configs: # Sanity check the IPAM pools. docker-py's type/function for creating # an IPAM pool will allow you to create a pool with a gateway, IP # range, or map of aux addresses, even when no subnet is passed. # However, attempting to use this IPAM pool when creating the network # will cause the Docker Engine to throw an error. if any("Subnet" not in pool for pool in pool_configs): raise SaltInvocationError("A subnet is required in each IPAM pool") else: kwargs["pool_configs"] = pool_configs ret = create_config(**kwargs) pool_dicts = ret.get("Config") if pool_dicts: # When you inspect a network with custom IPAM configuration, only # arguments which were explictly passed are reflected. By contrast, # docker-py will include keys for arguments which were not passed in # but set the value to None. Thus, for ease of comparison, the below # loop will remove all keys with a value of None from the generated # pool configs. for idx, _ in enumerate(pool_dicts): for key in list(pool_dicts[idx]): if pool_dicts[idx][key] is None: del pool_dicts[idx][key] return ret
35.44186
88
0.613845
import copy import logging import salt.utils.args import salt.utils.data import salt.utils.dockermod.translate from salt.exceptions import CommandExecutionError, SaltInvocationError from salt.utils.args import get_function_argspec as _argspec from salt.utils.dockermod.translate.helpers import split as _split try: import docker except ImportError: docker = None try: import docker.types except ImportError: pass try: import docker.utils except ImportError: pass NOTSET = object() __virtualname__ = "docker" CLIENT_TIMEOUT = 60 SHUTDOWN_TIMEOUT = 10 log = logging.getLogger(__name__) def __virtual__(): if docker is None: return False return __virtualname__ def get_client_args(limit=None): if docker is None: raise CommandExecutionError("docker Python module not imported") limit = salt.utils.args.split_input(limit or []) ret = {} if not limit or any( x in limit for x in ("create_container", "host_config", "connect_container_to_network") ): try: ret["create_container"] = _argspec(docker.APIClient.create_container).args except AttributeError: try: ret["create_container"] = _argspec(docker.Client.create_container).args except AttributeError: raise CommandExecutionError("Coult not get create_container argspec") try: ret["host_config"] = _argspec(docker.types.HostConfig.__init__).args except AttributeError: try: ret["host_config"] = _argspec(docker.utils.create_host_config).args except AttributeError: raise CommandExecutionError("Could not get create_host_config argspec") try: ret["connect_container_to_network"] = _argspec( docker.types.EndpointConfig.__init__ ).args except AttributeError: try: ret["connect_container_to_network"] = _argspec( docker.utils.utils.create_endpoint_config ).args except AttributeError: try: ret["connect_container_to_network"] = _argspec( docker.utils.create_endpoint_config ).args except AttributeError: raise CommandExecutionError( "Could not get connect_container_to_network argspec" ) for key, wrapped_func in ( ("logs", 
docker.api.container.ContainerApiMixin.logs), ("create_network", docker.api.network.NetworkApiMixin.create_network), ): if not limit or key in limit: try: func_ref = wrapped_func try: ret[key] = _argspec(func_ref.__wrapped__).args except AttributeError: ret[key] = [] except AttributeError: ret[key] = [] if not limit or "ipam_config" in limit: try: ret["ipam_config"] = _argspec(docker.types.IPAMPool.__init__).args except AttributeError: try: ret["ipam_config"] = _argspec(docker.utils.create_ipam_pool).args except AttributeError: raise CommandExecutionError("Could not get ipam args") for item in ret: # in the arglist. for argname in ("version", "self"): try: ret[item].remove(argname) except ValueError: pass # Remove any args in host or endpoint config from the create_container # arglist. This keeps us from accidentally allowing args that docker-py has # moved from the create_container function to the either the host or # endpoint config. for item in ("host_config", "connect_container_to_network"): for val in ret.get(item, []): try: ret["create_container"].remove(val) except ValueError: # Arg is not in create_container arglist pass for item in ("create_container", "host_config", "connect_container_to_network"): if limit and item not in limit: ret.pop(item, None) try: ret["logs"].remove("container") except (KeyError, ValueError, TypeError): pass return ret def translate_input( translator, skip_translate=None, ignore_collisions=False, validate_ip_addrs=True, **kwargs ): kwargs = copy.deepcopy(salt.utils.args.clean_kwargs(**kwargs)) invalid = {} collisions = [] if skip_translate is True: # Skip all translation return kwargs else: if not skip_translate: skip_translate = () else: try: skip_translate = _split(skip_translate) except AttributeError: pass if not hasattr(skip_translate, "__iter__"): log.error("skip_translate is not an iterable, ignoring") skip_translate = () try: # Using list(kwargs) here because if there are any invalid arguments we # will be popping them from 
the kwargs. for key in list(kwargs): real_key = translator.ALIASES.get(key, key) if real_key in skip_translate: continue # ipam_pools is designed to be passed as a list of actual # dictionaries, but if each of the dictionaries passed has a single # element, it will be incorrectly repacked. if key != "ipam_pools" and salt.utils.data.is_dictlist(kwargs[key]): kwargs[key] = salt.utils.data.repack_dictlist(kwargs[key]) try: kwargs[key] = getattr(translator, real_key)( kwargs[key], validate_ip_addrs=validate_ip_addrs, skip_translate=skip_translate, ) except AttributeError: log.debug("No translation function for argument '%s'", key) continue except SaltInvocationError as exc: kwargs.pop(key) invalid[key] = exc.strerror try: translator._merge_keys(kwargs) except AttributeError: pass # Convert CLI versions of commands to their docker-py counterparts for key in translator.ALIASES: if key in kwargs: new_key = translator.ALIASES[key] value = kwargs.pop(key) if new_key in kwargs: collisions.append(new_key) else: kwargs[new_key] = value try: translator._post_processing(kwargs, skip_translate, invalid) except AttributeError: pass except Exception as exc: # pylint: disable=broad-except error_message = exc.__str__() log.error("Error translating input: '%s'", error_message, exc_info=True) else: error_message = None error_data = {} if error_message is not None: error_data["error_message"] = error_message if invalid: error_data["invalid"] = invalid if collisions and not ignore_collisions: for item in collisions: error_data.setdefault("collisions", []).append( "'{}' is an alias for '{}', they cannot both be used".format( translator.ALIASES_REVMAP[item], item ) ) if error_data: raise CommandExecutionError("Failed to translate input", info=error_data) return kwargs def create_ipam_config(*pools, **kwargs): kwargs = salt.utils.args.clean_kwargs(**kwargs) try: # docker-py 2.0 and newer pool_args = salt.utils.args.get_function_argspec( docker.types.IPAMPool.__init__ ).args create_pool = 
docker.types.IPAMPool create_config = docker.types.IPAMConfig except AttributeError: # docker-py < 2.0 pool_args = salt.utils.args.get_function_argspec( docker.utils.create_ipam_pool ).args create_pool = docker.utils.create_ipam_pool create_config = docker.utils.create_ipam_config for primary_key, alias_key in (("driver", "ipam_driver"), ("options", "ipam_opts")): if alias_key in kwargs: alias_val = kwargs.pop(alias_key) if primary_key in kwargs: log.warning( "docker.create_ipam_config: Both '%s' and '%s' " "passed. Ignoring '%s'", alias_key, primary_key, alias_key, ) else: kwargs[primary_key] = alias_val if salt.utils.data.is_dictlist(kwargs.get("options")): kwargs["options"] = salt.utils.data.repack_dictlist(kwargs["options"]) # Get all of the IPAM pool args that were passed as individual kwargs # instead of in the *pools tuple pool_kwargs = {} for key in list(kwargs): if key in pool_args: pool_kwargs[key] = kwargs.pop(key) pool_configs = [] if pool_kwargs: pool_configs.append(create_pool(**pool_kwargs)) pool_configs.extend([create_pool(**pool) for pool in pools]) if pool_configs: # Sanity check the IPAM pools. docker-py's type/function for creating if any("Subnet" not in pool for pool in pool_configs): raise SaltInvocationError("A subnet is required in each IPAM pool") else: kwargs["pool_configs"] = pool_configs ret = create_config(**kwargs) pool_dicts = ret.get("Config") if pool_dicts: for idx, _ in enumerate(pool_dicts): for key in list(pool_dicts[idx]): if pool_dicts[idx][key] is None: del pool_dicts[idx][key] return ret
true
true
f703eede52c715495446e14a5f0c12b74f4ccf5b
2,834
py
Python
quantlab/COCO/YOLOv3Tiny/postprocess.py
lukasc-ch/QuantLab
7ddcc51ec1131a58269768cd898ce04e8b49beb6
[ "Apache-2.0" ]
6
2019-05-24T17:39:07.000Z
2021-11-06T22:19:55.000Z
quantlab/COCO/YOLOv3Tiny/postprocess.py
lukasc-ch/QuantLab
7ddcc51ec1131a58269768cd898ce04e8b49beb6
[ "Apache-2.0" ]
null
null
null
quantlab/COCO/YOLOv3Tiny/postprocess.py
lukasc-ch/QuantLab
7ddcc51ec1131a58269768cd898ce04e8b49beb6
[ "Apache-2.0" ]
4
2019-05-24T17:39:15.000Z
2021-04-02T07:13:11.000Z
# Copyright (c) 2019 UniMoRe, Matteo Spallanzani import torch from ..utils.utils import xywh2xyxy, bbox_iou def clip_boxes(boxes): boxes[:, [0, 2]] = boxes[:, [0, 2]].clamp(min=0, max=1) boxes[:, [1, 3]] = boxes[:, [1, 3]].clamp(min=0, max=1) def postprocess_pr(pr_outs, conf_thres=0.001, overlap_thres=0.5): """Restructure YOLOv3Tiny tensors into lists, then filter out non-maximal (redundant) annotations from the predictions.""" # pr_outs = [[bs, grid_positions, 85], [bs, 4*grid_positions, 85]] # when its two components are concatenated, we get a tensor [bs, 5*gridpositions, 85], which `bs` "slices" # have to be "stripped" to remove redundant components # strip each slice (corresponding to a single image in the batch) to get sequences of (possibly) different lengths: # the natural data structure to use to collect these sequences is a list pr_outs = [p.view(p.size(0), -1, p.size(-1)) for p in pr_outs] pr_outs = torch.cat(pr_outs, 1).detach().cpu() pr_labels = [None] * len(pr_outs) for img_id, pr in enumerate(pr_outs): # filter out irrelevant predictions pr_cls_prob, pr_cls_id = pr[:, 5:].max(1) pr[:, 4] *= pr_cls_prob i = (pr[:, 4] > conf_thres) & torch.isfinite(pr).all(1) pr = pr[i] if len(pr) == 0: continue pr_cls_prob = pr_cls_prob[i] pr_cls_id = pr_cls_id[i].unsqueeze(1).float() pr[:, :4] = xywh2xyxy(pr[:, :4]) pr = torch.cat((pr[:, :5], pr_cls_prob.unsqueeze(1), pr_cls_id), 1) pr = pr[(-pr[:, 4]).argsort()] detections = [] for c in pr[:, -1].unique(): pr_anno_c = pr[pr[:, -1] == c] n = len(pr_anno_c) if n == 1: detections.append(pr_anno_c) continue elif n > 100: pr_anno_c = pr_anno_c[:100] while len(pr_anno_c) > 0: if len(pr_anno_c) == 1: detections.append(pr_anno_c) break redundant = bbox_iou(pr_anno_c[0], pr_anno_c) > overlap_thres weights = pr_anno_c[redundant, 4:5] pr_anno_c[0, :4] = (weights * pr_anno_c[redundant, 0:4]).sum(0) / weights.sum() detections.append(pr_anno_c[0:1]) # keep leading dimension 1 for 1D tensor pr_anno_c = pr_anno_c[~redundant] if 
len(detections) > 0: detections = torch.cat(detections) clip_boxes(detections[:, :4]) pr_labels[img_id] = detections[(-detections[:, 4]).argsort()] return pr_labels def postprocess_gt(gt_labels): gt_labels = gt_labels.detach().cpu() bs = gt_labels[0, 0].to(torch.int) gt_labels = [gt_labels[gt_labels[:, 1] == i, 2:] for i in range(bs)] return gt_labels
42.298507
119
0.583275
import torch from ..utils.utils import xywh2xyxy, bbox_iou def clip_boxes(boxes): boxes[:, [0, 2]] = boxes[:, [0, 2]].clamp(min=0, max=1) boxes[:, [1, 3]] = boxes[:, [1, 3]].clamp(min=0, max=1) def postprocess_pr(pr_outs, conf_thres=0.001, overlap_thres=0.5): pr_outs = [p.view(p.size(0), -1, p.size(-1)) for p in pr_outs] pr_outs = torch.cat(pr_outs, 1).detach().cpu() pr_labels = [None] * len(pr_outs) for img_id, pr in enumerate(pr_outs): pr_cls_prob, pr_cls_id = pr[:, 5:].max(1) pr[:, 4] *= pr_cls_prob i = (pr[:, 4] > conf_thres) & torch.isfinite(pr).all(1) pr = pr[i] if len(pr) == 0: continue pr_cls_prob = pr_cls_prob[i] pr_cls_id = pr_cls_id[i].unsqueeze(1).float() pr[:, :4] = xywh2xyxy(pr[:, :4]) pr = torch.cat((pr[:, :5], pr_cls_prob.unsqueeze(1), pr_cls_id), 1) pr = pr[(-pr[:, 4]).argsort()] detections = [] for c in pr[:, -1].unique(): pr_anno_c = pr[pr[:, -1] == c] n = len(pr_anno_c) if n == 1: detections.append(pr_anno_c) continue elif n > 100: pr_anno_c = pr_anno_c[:100] while len(pr_anno_c) > 0: if len(pr_anno_c) == 1: detections.append(pr_anno_c) break redundant = bbox_iou(pr_anno_c[0], pr_anno_c) > overlap_thres weights = pr_anno_c[redundant, 4:5] pr_anno_c[0, :4] = (weights * pr_anno_c[redundant, 0:4]).sum(0) / weights.sum() detections.append(pr_anno_c[0:1]) pr_anno_c = pr_anno_c[~redundant] if len(detections) > 0: detections = torch.cat(detections) clip_boxes(detections[:, :4]) pr_labels[img_id] = detections[(-detections[:, 4]).argsort()] return pr_labels def postprocess_gt(gt_labels): gt_labels = gt_labels.detach().cpu() bs = gt_labels[0, 0].to(torch.int) gt_labels = [gt_labels[gt_labels[:, 1] == i, 2:] for i in range(bs)] return gt_labels
true
true
f703ef7c34d74366644a557d1ded65ad43afd065
12,320
py
Python
syft/frameworks/torch/mpc/fss.py
NicoSerranoP/PySyft
87fcd566c46fce4c16d363c94396dd26bd82a016
[ "Apache-2.0" ]
3
2020-11-24T05:15:57.000Z
2020-12-07T09:52:45.000Z
syft/frameworks/torch/mpc/fss.py
NicoSerranoP/PySyft
87fcd566c46fce4c16d363c94396dd26bd82a016
[ "Apache-2.0" ]
1
2020-09-29T00:24:31.000Z
2020-09-29T00:24:31.000Z
syft/frameworks/torch/mpc/fss.py
NicoSerranoP/PySyft
87fcd566c46fce4c16d363c94396dd26bd82a016
[ "Apache-2.0" ]
1
2021-09-04T16:27:41.000Z
2021-09-04T16:27:41.000Z
""" This is an implementation of Function Secret Sharing Useful papers are: - Function Secret Sharing- Improvements and Extensions, Boyle 2017 Link: https://eprint.iacr.org/2018/707.pdf - Secure Computation with Preprocessing via Function Secret Sharing, Boyle 2019 Link: https://eprint.iacr.org/2019/1095 Note that the protocols are quite different in aspect from those papers """ import hashlib import torch as th import syft as sy λ = 110 # 6 # 110 or 63 # security parameter n = 32 # 8 # 32 # bit precision dtype = th.int32 no_wrap = {"no_wrap": True} def initialize_crypto_plans(worker): """ This is called manually for the moment, to build the plan used to perform Function Secret Sharing on a specific worker. """ eq_plan_1 = sy.Plan( forward_func=lambda x, y: mask_builder(x, y, "eq"), owner=worker, tags=["#fss_eq_plan_1"], is_built=True, ) worker.register_obj(eq_plan_1) eq_plan_2 = sy.Plan( forward_func=eq_eval_plan, owner=worker, tags=["#fss_eq_plan_2"], is_built=True ) worker.register_obj(eq_plan_2) comp_plan_1 = sy.Plan( forward_func=lambda x, y: mask_builder(x, y, "comp"), owner=worker, tags=["#fss_comp_plan_1"], is_built=True, ) worker.register_obj(comp_plan_1) comp_plan_2 = sy.Plan( forward_func=comp_eval_plan, owner=worker, tags=["#fss_comp_plan_2"], is_built=True ) worker.register_obj(comp_plan_2) xor_add_plan = sy.Plan( forward_func=xor_add_convert_1, owner=worker, tags=["#xor_add_1"], is_built=True ) worker.register_obj(xor_add_plan) xor_add_plan = sy.Plan( forward_func=xor_add_convert_2, owner=worker, tags=["#xor_add_2"], is_built=True ) worker.register_obj(xor_add_plan) def request_run_plan(worker, plan_tag, location, return_value, args=(), kwargs={}): response_ids = (sy.ID_PROVIDER.pop(),) args = (args, response_ids) response = worker.send_command( cmd_name="run", target=plan_tag, recipient=location, return_ids=response_ids, return_value=return_value, kwargs_=kwargs, args_=args, ) return response def fss_op(x1, x2, type_op="eq"): """ Define the workflow 
for a binary operation using Function Secret Sharing Currently supported operand are = & <=, respectively corresponding to type_op = 'eq' and 'comp' Args: x1: first AST x2: second AST type_op: type of operation to perform, should be 'eq' or 'comp' Returns: shares of the comparison """ me = sy.local_worker locations = x1.locations shares = [] for location in locations: args = (x1.child[location.id], x2.child[location.id]) share = request_run_plan( me, f"#fss_{type_op}_plan_1", location, return_value=True, args=args ) shares.append(share) mask_value = sum(shares) % 2 ** n shares = [] for i, location in enumerate(locations): args = (th.IntTensor([i]), mask_value) share = request_run_plan( me, f"#fss_{type_op}_plan_2", location, return_value=False, args=args ) shares.append(share) if type_op == "comp": prev_shares = shares shares = [] for prev_share, location in zip(prev_shares, locations): share = request_run_plan( me, "#xor_add_1", location, return_value=True, args=(prev_share,) ) shares.append(share) masked_value = shares[0] ^ shares[1] # TODO case >2 workers ? 
shares = {} for i, prev_share, location in zip(range(len(locations)), prev_shares, locations): share = request_run_plan( me, "#xor_add_2", location, return_value=False, args=(th.IntTensor([i]), masked_value), ) shares[location.id] = share else: shares = {loc.id: share for loc, share in zip(locations, shares)} response = sy.AdditiveSharingTensor(shares, **x1.get_class_attributes()) return response # share level def mask_builder(x1, x2, type_op): x = x1 - x2 # Keep the primitive in store as we use it after alpha, s_0, *CW = x1.owner.crypto_store.get_keys( f"fss_{type_op}", n_instances=x1.numel(), remove=False ) return x + alpha.reshape(x.shape) # share level def eq_eval_plan(b, x_masked): alpha, s_0, *CW = x_masked.owner.crypto_store.get_keys( type_op="fss_eq", n_instances=x_masked.numel(), remove=True ) result_share = DPF.eval(b, x_masked, s_0, *CW) return result_share # share level def comp_eval_plan(b, x_masked): alpha, s_0, *CW = x_masked.owner.crypto_store.get_keys( type_op="fss_comp", n_instances=x_masked.numel(), remove=True ) result_share = DIF.eval(b, x_masked, s_0, *CW) return result_share def xor_add_convert_1(x): xor_share, add_share = x.owner.crypto_store.get_keys( type_op="xor_add_couple", n_instances=x.numel(), remove=False ) return x ^ xor_share.reshape(x.shape) def xor_add_convert_2(b, x): xor_share, add_share = x.owner.crypto_store.get_keys( type_op="xor_add_couple", n_instances=x.numel(), remove=True ) return add_share.reshape(x.shape) * (1 - 2 * x) + x * b def eq(x1, x2): return fss_op(x1, x2, "eq") def le(x1, x2): return fss_op(x1, x2, "comp") class DPF: """Distributed Point Function - used for equality""" def __init__(self): pass @staticmethod def keygen(n_values=1): beta = th.tensor([1], dtype=dtype) alpha = th.randint(0, 2 ** n, (n_values,)) α = bit_decomposition(alpha) s, t, CW = ( Array(n + 1, 2, λ, n_values), Array(n + 1, 2, n_values), Array(n, 2 * (λ + 1), n_values), ) s[0] = randbit(size=(2, λ, n_values)) t[0] = th.tensor([[0, 1]] * 
n_values, dtype=th.uint8).t() for i in range(0, n): g0 = G(s[i, 0]) g1 = G(s[i, 1]) # Re-use useless randomness sL_0, _, sR_0, _ = split(g0, [λ, 1, λ, 1]) sL_1, _, sR_1, _ = split(g1, [λ, 1, λ, 1]) s_rand = (sL_0 ^ sL_1) * α[i] + (sR_0 ^ sR_1) * (1 - α[i]) cw_i = TruthTableDPF(s_rand, α[i]) CW[i] = cw_i ^ g0 ^ g1 for b in (0, 1): τ = [g0, g1][b] ^ (t[i, b] * CW[i]) τ = τ.reshape(2, λ + 1, n_values) # filtered_τ = τ[𝛼[i]] OLD α_i = α[i].unsqueeze(0).expand(λ + 1, n_values).unsqueeze(0).long() filtered_τ = th.gather(τ, 0, α_i).squeeze(0) s[i + 1, b], t[i + 1, b] = split(filtered_τ, [λ, 1]) CW_n = (-1) ** t[n, 1].to(dtype) * (beta - Convert(s[n, 0]) + Convert(s[n, 1])) return (alpha,) + s[0].unbind() + (CW, CW_n) @staticmethod def eval(b, x, *k_b): original_shape = x.shape x = x.reshape(-1) n_values = x.shape[0] x = bit_decomposition(x) s, t = Array(n + 1, λ, n_values), Array(n + 1, 1, n_values) s[0] = k_b[0] # here k[1:] is (CW, CW_n) CW = k_b[1].unbind() + (k_b[2],) t[0] = b for i in range(0, n): τ = G(s[i]) ^ (t[i] * CW[i]) τ = τ.reshape(2, λ + 1, n_values) x_i = x[i].unsqueeze(0).expand(λ + 1, n_values).unsqueeze(0).long() filtered_τ = th.gather(τ, 0, x_i).squeeze(0) s[i + 1], t[i + 1] = split(filtered_τ, [λ, 1]) flat_result = (-1) ** b * (Convert(s[n]) + t[n].squeeze() * CW[n]) return flat_result.reshape(original_shape) class DIF: """Distributed Interval Function - used for comparison <=""" def __init__(self): pass @staticmethod def keygen(n_values=1): alpha = th.randint(0, 2 ** n, (n_values,)) α = bit_decomposition(alpha) s, t, CW = ( Array(n + 1, 2, λ, n_values), Array(n + 1, 2, n_values), Array(n, 2 + 2 * (λ + 1), n_values), ) s[0] = randbit(size=(2, λ, n_values)) t[0] = th.tensor([[0, 1]] * n_values, dtype=th.uint8).t() for i in range(0, n): h0 = H(s[i, 0]) h1 = H(s[i, 1]) # Re-use useless randomness _, _, sL_0, _, sR_0, _ = split(h0, [1, 1, λ, 1, λ, 1]) _, _, sL_1, _, sR_1, _ = split(h1, [1, 1, λ, 1, λ, 1]) s_rand = (sL_0 ^ sL_1) * α[i] + (sR_0 ^ sR_1) * (1 
- α[i]) cw_i = TruthTableDIF(s_rand, α[i]) CW[i] = cw_i ^ h0 ^ h1 for b in (0, 1): τ = [h0, h1][b] ^ (t[i, b] * CW[i]) τ = τ.reshape(2, λ + 2, n_values) # filtered_τ = τ[𝛼[i]] OLD α_i = α[i].unsqueeze(0).expand(λ + 2, n_values).unsqueeze(0).long() filtered_τ = th.gather(τ, 0, α_i).squeeze(0) σ_leaf, s[i + 1, b], t[i + 1, b] = split(filtered_τ, [1, λ, 1]) return (alpha,) + s[0].unbind() + (CW,) @staticmethod def eval(b, x, *k_b): original_shape = x.shape x = x.reshape(-1) n_values = x.shape[0] x = bit_decomposition(x) FnOutput = Array(n + 1, n_values) s, t = Array(n + 1, λ, n_values), Array(n + 1, 1, n_values) s[0] = k_b[0] CW = k_b[1].unbind() t[0] = b for i in range(0, n): τ = H(s[i]) ^ (t[i] * CW[i]) τ = τ.reshape(2, λ + 2, n_values) x_i = x[i].unsqueeze(0).expand(λ + 2, n_values).unsqueeze(0).long() filtered_τ = th.gather(τ, 0, x_i).squeeze(0) σ_leaf, s[i + 1], t[i + 1] = split(filtered_τ, [1, λ, 1]) FnOutput[i] = σ_leaf # Last tour, the other σ is also a leaf: FnOutput[n] = t[n] flat_result = FnOutput.sum(axis=0) % 2 return flat_result.reshape(original_shape) # PRG def G(seed): assert seed.shape[0] == λ seed_t = seed.t().tolist() gen_list = [] for seed_bit in seed_t: enc_str = str(seed_bit).encode() h = hashlib.sha3_256(enc_str) r = h.digest() binary_str = bin(int.from_bytes(r, byteorder="big"))[2 : 2 + (2 * (λ + 1))] gen_list.append(list(map(int, binary_str))) return th.tensor(gen_list, dtype=th.uint8).t() def H(seed): assert seed.shape[0] == λ seed_t = seed.t().tolist() gen_list = [] for seed_bit in seed_t: enc_str = str(seed_bit).encode() h = hashlib.sha3_256(enc_str) r = h.digest() binary_str = bin(int.from_bytes(r, byteorder="big"))[2 : 2 + 2 + (2 * (λ + 1))] gen_list.append(list(map(int, binary_str))) return th.tensor(gen_list, dtype=th.uint8).t() def Convert(bits): bit_pow_lambda = th.flip(2 ** th.arange(λ), (0,)).unsqueeze(-1).to(th.long) return (bits.to(th.long) * bit_pow_lambda).sum(dim=0).to(dtype) def Array(*shape): return th.empty(shape, 
dtype=th.uint8) bit_pow_n = th.flip(2 ** th.arange(n), (0,)) def bit_decomposition(x): x = x.unsqueeze(-1) z = bit_pow_n & x z = z.t() return (z > 0).to(th.uint8) def randbit(size): return th.randint(2, size=size) def concat(*args, **kwargs): return th.cat(args, **kwargs) def split(x, idx): return th.split(x, idx) def TruthTableDPF(s, α_i): one = th.ones((1, s.shape[1])).to(th.uint8) s_one = concat(s, one) Table = th.zeros((2, λ + 1, len(α_i)), dtype=th.uint8) for j, el in enumerate(α_i): Table[el.item(), :, j] = s_one[:, j] return Table.reshape(-1, Table.shape[2]) def TruthTableDIF(s, α_i): leafTable = th.zeros((2, 1, len(α_i)), dtype=th.uint8) # TODO optimize: just put alpha on first line leaf_value = α_i for j, el in enumerate(α_i): leafTable[(1 - el).item(), 0, j] = leaf_value[j] one = th.ones((1, s.shape[1])).to(th.uint8) s_one = concat(s, one) nextTable = th.zeros((2, λ + 1, len(α_i)), dtype=th.uint8) for j, el in enumerate(α_i): nextTable[el.item(), :, j] = s_one[:, j] Table = concat(leafTable, nextTable, axis=1) Table = Table.reshape(-1, Table.shape[2]) return Table
30.419753
91
0.561607
import hashlib import torch as th import syft as sy λ = 110 n = 32 dtype = th.int32 no_wrap = {"no_wrap": True} def initialize_crypto_plans(worker): eq_plan_1 = sy.Plan( forward_func=lambda x, y: mask_builder(x, y, "eq"), owner=worker, tags=["#fss_eq_plan_1"], is_built=True, ) worker.register_obj(eq_plan_1) eq_plan_2 = sy.Plan( forward_func=eq_eval_plan, owner=worker, tags=["#fss_eq_plan_2"], is_built=True ) worker.register_obj(eq_plan_2) comp_plan_1 = sy.Plan( forward_func=lambda x, y: mask_builder(x, y, "comp"), owner=worker, tags=["#fss_comp_plan_1"], is_built=True, ) worker.register_obj(comp_plan_1) comp_plan_2 = sy.Plan( forward_func=comp_eval_plan, owner=worker, tags=["#fss_comp_plan_2"], is_built=True ) worker.register_obj(comp_plan_2) xor_add_plan = sy.Plan( forward_func=xor_add_convert_1, owner=worker, tags=["#xor_add_1"], is_built=True ) worker.register_obj(xor_add_plan) xor_add_plan = sy.Plan( forward_func=xor_add_convert_2, owner=worker, tags=["#xor_add_2"], is_built=True ) worker.register_obj(xor_add_plan) def request_run_plan(worker, plan_tag, location, return_value, args=(), kwargs={}): response_ids = (sy.ID_PROVIDER.pop(),) args = (args, response_ids) response = worker.send_command( cmd_name="run", target=plan_tag, recipient=location, return_ids=response_ids, return_value=return_value, kwargs_=kwargs, args_=args, ) return response def fss_op(x1, x2, type_op="eq"): me = sy.local_worker locations = x1.locations shares = [] for location in locations: args = (x1.child[location.id], x2.child[location.id]) share = request_run_plan( me, f"#fss_{type_op}_plan_1", location, return_value=True, args=args ) shares.append(share) mask_value = sum(shares) % 2 ** n shares = [] for i, location in enumerate(locations): args = (th.IntTensor([i]), mask_value) share = request_run_plan( me, f"#fss_{type_op}_plan_2", location, return_value=False, args=args ) shares.append(share) if type_op == "comp": prev_shares = shares shares = [] for prev_share, location in 
zip(prev_shares, locations): share = request_run_plan( me, "#xor_add_1", location, return_value=True, args=(prev_share,) ) shares.append(share) masked_value = shares[0] ^ shares[1] shares = {} for i, prev_share, location in zip(range(len(locations)), prev_shares, locations): share = request_run_plan( me, "#xor_add_2", location, return_value=False, args=(th.IntTensor([i]), masked_value), ) shares[location.id] = share else: shares = {loc.id: share for loc, share in zip(locations, shares)} response = sy.AdditiveSharingTensor(shares, **x1.get_class_attributes()) return response def mask_builder(x1, x2, type_op): x = x1 - x2 alpha, s_0, *CW = x1.owner.crypto_store.get_keys( f"fss_{type_op}", n_instances=x1.numel(), remove=False ) return x + alpha.reshape(x.shape) def eq_eval_plan(b, x_masked): alpha, s_0, *CW = x_masked.owner.crypto_store.get_keys( type_op="fss_eq", n_instances=x_masked.numel(), remove=True ) result_share = DPF.eval(b, x_masked, s_0, *CW) return result_share def comp_eval_plan(b, x_masked): alpha, s_0, *CW = x_masked.owner.crypto_store.get_keys( type_op="fss_comp", n_instances=x_masked.numel(), remove=True ) result_share = DIF.eval(b, x_masked, s_0, *CW) return result_share def xor_add_convert_1(x): xor_share, add_share = x.owner.crypto_store.get_keys( type_op="xor_add_couple", n_instances=x.numel(), remove=False ) return x ^ xor_share.reshape(x.shape) def xor_add_convert_2(b, x): xor_share, add_share = x.owner.crypto_store.get_keys( type_op="xor_add_couple", n_instances=x.numel(), remove=True ) return add_share.reshape(x.shape) * (1 - 2 * x) + x * b def eq(x1, x2): return fss_op(x1, x2, "eq") def le(x1, x2): return fss_op(x1, x2, "comp") class DPF: def __init__(self): pass @staticmethod def keygen(n_values=1): beta = th.tensor([1], dtype=dtype) alpha = th.randint(0, 2 ** n, (n_values,)) α = bit_decomposition(alpha) s, t, CW = ( Array(n + 1, 2, λ, n_values), Array(n + 1, 2, n_values), Array(n, 2 * (λ + 1), n_values), ) s[0] = randbit(size=(2, λ, 
n_values)) t[0] = th.tensor([[0, 1]] * n_values, dtype=th.uint8).t() for i in range(0, n): g0 = G(s[i, 0]) g1 = G(s[i, 1]) sL_0, _, sR_0, _ = split(g0, [λ, 1, λ, 1]) sL_1, _, sR_1, _ = split(g1, [λ, 1, λ, 1]) s_rand = (sL_0 ^ sL_1) * α[i] + (sR_0 ^ sR_1) * (1 - α[i]) cw_i = TruthTableDPF(s_rand, α[i]) CW[i] = cw_i ^ g0 ^ g1 for b in (0, 1): τ = [g0, g1][b] ^ (t[i, b] * CW[i]) τ = τ.reshape(2, λ + 1, n_values) α_i = α[i].unsqueeze(0).expand(λ + 1, n_values).unsqueeze(0).long() filtered_τ = th.gather(τ, 0, α_i).squeeze(0) s[i + 1, b], t[i + 1, b] = split(filtered_τ, [λ, 1]) CW_n = (-1) ** t[n, 1].to(dtype) * (beta - Convert(s[n, 0]) + Convert(s[n, 1])) return (alpha,) + s[0].unbind() + (CW, CW_n) @staticmethod def eval(b, x, *k_b): original_shape = x.shape x = x.reshape(-1) n_values = x.shape[0] x = bit_decomposition(x) s, t = Array(n + 1, λ, n_values), Array(n + 1, 1, n_values) s[0] = k_b[0] CW = k_b[1].unbind() + (k_b[2],) t[0] = b for i in range(0, n): τ = G(s[i]) ^ (t[i] * CW[i]) τ = τ.reshape(2, λ + 1, n_values) x_i = x[i].unsqueeze(0).expand(λ + 1, n_values).unsqueeze(0).long() filtered_τ = th.gather(τ, 0, x_i).squeeze(0) s[i + 1], t[i + 1] = split(filtered_τ, [λ, 1]) flat_result = (-1) ** b * (Convert(s[n]) + t[n].squeeze() * CW[n]) return flat_result.reshape(original_shape) class DIF: def __init__(self): pass @staticmethod def keygen(n_values=1): alpha = th.randint(0, 2 ** n, (n_values,)) α = bit_decomposition(alpha) s, t, CW = ( Array(n + 1, 2, λ, n_values), Array(n + 1, 2, n_values), Array(n, 2 + 2 * (λ + 1), n_values), ) s[0] = randbit(size=(2, λ, n_values)) t[0] = th.tensor([[0, 1]] * n_values, dtype=th.uint8).t() for i in range(0, n): h0 = H(s[i, 0]) h1 = H(s[i, 1]) _, _, sL_0, _, sR_0, _ = split(h0, [1, 1, λ, 1, λ, 1]) _, _, sL_1, _, sR_1, _ = split(h1, [1, 1, λ, 1, λ, 1]) s_rand = (sL_0 ^ sL_1) * α[i] + (sR_0 ^ sR_1) * (1 - α[i]) cw_i = TruthTableDIF(s_rand, α[i]) CW[i] = cw_i ^ h0 ^ h1 for b in (0, 1): τ = [h0, h1][b] ^ (t[i, b] * CW[i]) τ = 
τ.reshape(2, λ + 2, n_values) α_i = α[i].unsqueeze(0).expand(λ + 2, n_values).unsqueeze(0).long() filtered_τ = th.gather(τ, 0, α_i).squeeze(0) σ_leaf, s[i + 1, b], t[i + 1, b] = split(filtered_τ, [1, λ, 1]) return (alpha,) + s[0].unbind() + (CW,) @staticmethod def eval(b, x, *k_b): original_shape = x.shape x = x.reshape(-1) n_values = x.shape[0] x = bit_decomposition(x) FnOutput = Array(n + 1, n_values) s, t = Array(n + 1, λ, n_values), Array(n + 1, 1, n_values) s[0] = k_b[0] CW = k_b[1].unbind() t[0] = b for i in range(0, n): τ = H(s[i]) ^ (t[i] * CW[i]) τ = τ.reshape(2, λ + 2, n_values) x_i = x[i].unsqueeze(0).expand(λ + 2, n_values).unsqueeze(0).long() filtered_τ = th.gather(τ, 0, x_i).squeeze(0) σ_leaf, s[i + 1], t[i + 1] = split(filtered_τ, [1, λ, 1]) FnOutput[i] = σ_leaf FnOutput[n] = t[n] flat_result = FnOutput.sum(axis=0) % 2 return flat_result.reshape(original_shape) def G(seed): assert seed.shape[0] == λ seed_t = seed.t().tolist() gen_list = [] for seed_bit in seed_t: enc_str = str(seed_bit).encode() h = hashlib.sha3_256(enc_str) r = h.digest() binary_str = bin(int.from_bytes(r, byteorder="big"))[2 : 2 + (2 * (λ + 1))] gen_list.append(list(map(int, binary_str))) return th.tensor(gen_list, dtype=th.uint8).t() def H(seed): assert seed.shape[0] == λ seed_t = seed.t().tolist() gen_list = [] for seed_bit in seed_t: enc_str = str(seed_bit).encode() h = hashlib.sha3_256(enc_str) r = h.digest() binary_str = bin(int.from_bytes(r, byteorder="big"))[2 : 2 + 2 + (2 * (λ + 1))] gen_list.append(list(map(int, binary_str))) return th.tensor(gen_list, dtype=th.uint8).t() def Convert(bits): bit_pow_lambda = th.flip(2 ** th.arange(λ), (0,)).unsqueeze(-1).to(th.long) return (bits.to(th.long) * bit_pow_lambda).sum(dim=0).to(dtype) def Array(*shape): return th.empty(shape, dtype=th.uint8) bit_pow_n = th.flip(2 ** th.arange(n), (0,)) def bit_decomposition(x): x = x.unsqueeze(-1) z = bit_pow_n & x z = z.t() return (z > 0).to(th.uint8) def randbit(size): return th.randint(2, 
size=size) def concat(*args, **kwargs): return th.cat(args, **kwargs) def split(x, idx): return th.split(x, idx) def TruthTableDPF(s, α_i): one = th.ones((1, s.shape[1])).to(th.uint8) s_one = concat(s, one) Table = th.zeros((2, λ + 1, len(α_i)), dtype=th.uint8) for j, el in enumerate(α_i): Table[el.item(), :, j] = s_one[:, j] return Table.reshape(-1, Table.shape[2]) def TruthTableDIF(s, α_i): leafTable = th.zeros((2, 1, len(α_i)), dtype=th.uint8) leaf_value = α_i for j, el in enumerate(α_i): leafTable[(1 - el).item(), 0, j] = leaf_value[j] one = th.ones((1, s.shape[1])).to(th.uint8) s_one = concat(s, one) nextTable = th.zeros((2, λ + 1, len(α_i)), dtype=th.uint8) for j, el in enumerate(α_i): nextTable[el.item(), :, j] = s_one[:, j] Table = concat(leafTable, nextTable, axis=1) Table = Table.reshape(-1, Table.shape[2]) return Table
true
true
f703f1fd222c2a9e77a5a7e4c6b60bababcf5e23
1,742
py
Python
python-pscheduler/pscheduler/tests/limitprocessor_identifier_localsubnet_test.py
krihal/pscheduler
e69e0357797d88d290c78b92b1d99048e73a63e8
[ "Apache-2.0" ]
47
2016-09-28T14:19:10.000Z
2022-03-21T13:26:47.000Z
python-pscheduler/pscheduler/tests/limitprocessor_identifier_localsubnet_test.py
krihal/pscheduler
e69e0357797d88d290c78b92b1d99048e73a63e8
[ "Apache-2.0" ]
993
2016-07-07T19:30:32.000Z
2022-03-21T10:25:52.000Z
python-pscheduler/pscheduler/tests/limitprocessor_identifier_localsubnet_test.py
mfeit-internet2/pscheduler-dev
d2cd4065a6fce88628b0ca63edc7a69f2672dad2
[ "Apache-2.0" ]
36
2016-09-15T09:39:45.000Z
2021-06-23T15:05:13.000Z
#!/usr/bin/env python3 """ Test for local-subnet identifier """ import unittest import netifaces from base_test import PschedTestBase from pscheduler.limitprocessor.identifier.localsubnet import * DATA = { } class TestLimitprocessorIdentifierLocalSubnet(PschedTestBase): """ Test the Identifier """ def test_data_is_valid(self): """Limit Processor / Identifier Local Subnet / Data Validation""" self.assertEqual(data_is_valid(DATA), (True, "OK")) self.assertEqual(data_is_valid({ "abc": 123 }), (False, 'Data is not an object or not empty.')) def test_identifier(self): """Limit Processor / Identifier Local Subnet / Identifier""" test_ifaces = { "lo0": { netifaces.AF_INET: [ {'addr': '127.0.0.1', 'netmask': '255.0.0.0', 'peer': '127.0.0.1'} ], netifaces.AF_INET6: [ {'addr': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128', 'peer': '::1', 'flags': 0}, {'addr': 'fe80::1%lo0', 'netmask': 'ffff:ffff:ffff:ffff::/64', 'flags': 0} ] } } ident = IdentifierLocalSubnet(DATA, test_ifaces=test_ifaces) self.assertEqual( ident.evaluate({ "requester": "127.0.0.5" }), True) self.assertEqual( ident.evaluate({ "requester": "fe80::1" }), True) self.assertEqual( ident.evaluate({ "requester": "192.0.2.9" }), False) self.assertEqual( ident.evaluate({ "requester": "2001:db8::1" }), False) if __name__ == '__main__': unittest.main()
24.885714
122
0.539036
import unittest import netifaces from base_test import PschedTestBase from pscheduler.limitprocessor.identifier.localsubnet import * DATA = { } class TestLimitprocessorIdentifierLocalSubnet(PschedTestBase): def test_data_is_valid(self): self.assertEqual(data_is_valid(DATA), (True, "OK")) self.assertEqual(data_is_valid({ "abc": 123 }), (False, 'Data is not an object or not empty.')) def test_identifier(self): test_ifaces = { "lo0": { netifaces.AF_INET: [ {'addr': '127.0.0.1', 'netmask': '255.0.0.0', 'peer': '127.0.0.1'} ], netifaces.AF_INET6: [ {'addr': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128', 'peer': '::1', 'flags': 0}, {'addr': 'fe80::1%lo0', 'netmask': 'ffff:ffff:ffff:ffff::/64', 'flags': 0} ] } } ident = IdentifierLocalSubnet(DATA, test_ifaces=test_ifaces) self.assertEqual( ident.evaluate({ "requester": "127.0.0.5" }), True) self.assertEqual( ident.evaluate({ "requester": "fe80::1" }), True) self.assertEqual( ident.evaluate({ "requester": "192.0.2.9" }), False) self.assertEqual( ident.evaluate({ "requester": "2001:db8::1" }), False) if __name__ == '__main__': unittest.main()
true
true
f703f2014bff202689cdafa2e3aaea89acf87846
173
py
Python
Hip/Kernels/RadixSort.py
EmilPi/PuzzleLib
31aa0fab3b5e9472b9b9871ca52e4d94ea683fa9
[ "Apache-2.0" ]
52
2020-02-28T20:40:15.000Z
2021-08-25T05:35:17.000Z
Hip/Kernels/RadixSort.py
EmilPi/PuzzleLib
31aa0fab3b5e9472b9b9871ca52e4d94ea683fa9
[ "Apache-2.0" ]
2
2021-02-14T15:57:03.000Z
2021-10-05T12:21:34.000Z
Hip/Kernels/RadixSort.py
EmilPi/PuzzleLib
31aa0fab3b5e9472b9b9871ca52e4d94ea683fa9
[ "Apache-2.0" ]
8
2020-02-28T20:40:11.000Z
2020-07-09T13:27:23.000Z
from PuzzleLib.Cuda.Kernels.RadixSort import backendTest def unittest(): from PuzzleLib.Hip import Backend backendTest(Backend) if __name__ == "__main__": unittest()
15.727273
56
0.774566
from PuzzleLib.Cuda.Kernels.RadixSort import backendTest def unittest(): from PuzzleLib.Hip import Backend backendTest(Backend) if __name__ == "__main__": unittest()
true
true
f703f23f6d1cd8016330bee76599c757747a9bf8
1,249
py
Python
setup.py
charlon/axdd-django-vue
86c1ca4a6be4e1f4ae1d534296c7d2f58d0c6fb7
[ "Apache-2.0" ]
null
null
null
setup.py
charlon/axdd-django-vue
86c1ca4a6be4e1f4ae1d534296c7d2f58d0c6fb7
[ "Apache-2.0" ]
5
2020-12-29T18:52:27.000Z
2020-12-29T19:33:29.000Z
setup.py
charlon/axdd-django-vue
86c1ca4a6be4e1f4ae1d534296c7d2f58d0c6fb7
[ "Apache-2.0" ]
null
null
null
import os from setuptools import setup README = """ See the README on `GitHub <https://github.com/uw-it-aca/app_name>`_. """ # The VERSION file is created by travis-ci, based on the tag name version_path = "app_name/VERSION" print(os.path.join(os.path.dirname(__file__), version_path)) VERSION = open(os.path.join(os.path.dirname(__file__), version_path)).read() VERSION = VERSION.replace("\n", "") # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) url = "https://github.com/uw-it-aca/app_name" setup( name="app_name", version=VERSION, packages=["app_name"], author="UW-IT AXDD", author_email="aca-it@uw.edu", include_package_data=True, install_requires=[ 'django~=3.2', "django-webpack-loader", ], license="Apache License, Version 2.0", description="A tool for visually displaying UW course prerequisites", long_description=README, url=url, classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.7", ], )
29.738095
78
0.670136
import os from setuptools import setup README = """ See the README on `GitHub <https://github.com/uw-it-aca/app_name>`_. """ version_path = "app_name/VERSION" print(os.path.join(os.path.dirname(__file__), version_path)) VERSION = open(os.path.join(os.path.dirname(__file__), version_path)).read() VERSION = VERSION.replace("\n", "") os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) url = "https://github.com/uw-it-aca/app_name" setup( name="app_name", version=VERSION, packages=["app_name"], author="UW-IT AXDD", author_email="aca-it@uw.edu", include_package_data=True, install_requires=[ 'django~=3.2', "django-webpack-loader", ], license="Apache License, Version 2.0", description="A tool for visually displaying UW course prerequisites", long_description=README, url=url, classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.7", ], )
true
true
f703f3cff99eda5f18479dfe895420bc183bdc50
2,301
py
Python
ml/helpers.py
JamesXChang/label_tool
f62470a2bf677a2dd1d18054baf2d651d69c83a9
[ "Apache-2.0" ]
null
null
null
ml/helpers.py
JamesXChang/label_tool
f62470a2bf677a2dd1d18054baf2d651d69c83a9
[ "Apache-2.0" ]
4
2021-06-02T02:33:35.000Z
2022-03-12T00:42:39.000Z
ml/helpers.py
JamesXChang/label_tool
f62470a2bf677a2dd1d18054baf2d651d69c83a9
[ "Apache-2.0" ]
null
null
null
from abc import abstractmethod from ml import LabelStudioMLBase class LabelStudioMLBaseHelper(LabelStudioMLBase): @abstractmethod def prepare_tasks(self, tasks, workdir=None, **kwargs): pass @abstractmethod def convert_predictions(self, predictions, **kwargs): pass @abstractmethod def predict2(self, X, y=None, **kwargs): pass @abstractmethod def fit2(self, X, y, **kwargs): pass def predict(self, tasks, **kwargs): X, y = self.prepare_tasks(tasks, **kwargs) predictions = self.predict2(X, y, **kwargs) result = self.convert_predictions(predictions, **kwargs) return result def fit(self, completions, workdir=None, **kwargs): X, y = self.prepare_tasks(completions, workdir=workdir, **kwargs) return self.fit2(X, y, **kwargs) def _has_annotation(self, task): return 'completions' in task class LabelStudioMLChoices(LabelStudioMLBaseHelper): def __init__(self, **kwargs): super(LabelStudioMLChoices, self).__init__(**kwargs) assert len(self.parsed_label_config) == 1 self.from_name, self.info = list(self.parsed_label_config.items())[0] assert self.info['type'] == 'Choices' assert len(self.info['to_name']) == 1 assert len(self.info['inputs']) == 1 self.to_name = self.info['to_name'][0] self.value = self.info['inputs'][0]['value'] def prepare_tasks(self, tasks, workdir=None, **kwargs): X, y = [], [] for task in tasks: X.append(task['data'][self.value]) if self._has_annotation(task): choices = task['completions'][0]['result'][0]['value']['choices'] y.append(choices) else: y.append(None) return X, y def convert_predictions(self, predictions, **kwargs): list_choices, scores = predictions results = [] for choices, score in zip(list_choices, scores): result = [{ 'from_name': self.from_name, 'to_name': self.to_name, 'type': 'choices', 'value': {'choices': choices} }] results.append({'result': result, 'score': score}) return results
32.408451
81
0.592351
from abc import abstractmethod from ml import LabelStudioMLBase class LabelStudioMLBaseHelper(LabelStudioMLBase): @abstractmethod def prepare_tasks(self, tasks, workdir=None, **kwargs): pass @abstractmethod def convert_predictions(self, predictions, **kwargs): pass @abstractmethod def predict2(self, X, y=None, **kwargs): pass @abstractmethod def fit2(self, X, y, **kwargs): pass def predict(self, tasks, **kwargs): X, y = self.prepare_tasks(tasks, **kwargs) predictions = self.predict2(X, y, **kwargs) result = self.convert_predictions(predictions, **kwargs) return result def fit(self, completions, workdir=None, **kwargs): X, y = self.prepare_tasks(completions, workdir=workdir, **kwargs) return self.fit2(X, y, **kwargs) def _has_annotation(self, task): return 'completions' in task class LabelStudioMLChoices(LabelStudioMLBaseHelper): def __init__(self, **kwargs): super(LabelStudioMLChoices, self).__init__(**kwargs) assert len(self.parsed_label_config) == 1 self.from_name, self.info = list(self.parsed_label_config.items())[0] assert self.info['type'] == 'Choices' assert len(self.info['to_name']) == 1 assert len(self.info['inputs']) == 1 self.to_name = self.info['to_name'][0] self.value = self.info['inputs'][0]['value'] def prepare_tasks(self, tasks, workdir=None, **kwargs): X, y = [], [] for task in tasks: X.append(task['data'][self.value]) if self._has_annotation(task): choices = task['completions'][0]['result'][0]['value']['choices'] y.append(choices) else: y.append(None) return X, y def convert_predictions(self, predictions, **kwargs): list_choices, scores = predictions results = [] for choices, score in zip(list_choices, scores): result = [{ 'from_name': self.from_name, 'to_name': self.to_name, 'type': 'choices', 'value': {'choices': choices} }] results.append({'result': result, 'score': score}) return results
true
true
f703f43c75dc352fffd4d9a11fd1a6923562ac59
6,295
py
Python
mercadobitcoin/trade_api.py
carlettibruno/python-mercadobitcoin
dda94ba0b6f2a545e3ceb4480f6a51900322a031
[ "MIT" ]
null
null
null
mercadobitcoin/trade_api.py
carlettibruno/python-mercadobitcoin
dda94ba0b6f2a545e3ceb4480f6a51900322a031
[ "MIT" ]
null
null
null
mercadobitcoin/trade_api.py
carlettibruno/python-mercadobitcoin
dda94ba0b6f2a545e3ceb4480f6a51900322a031
[ "MIT" ]
null
null
null
import requests import urllib import time import hashlib import hmac import itertools try: from urllib.parse import urlencode except ImportError: from urllib import urlencode from .api import Base from .errors import ApiError, ArgumentError def check_values(value, arg, arg_value): if type(value) == type: if type(arg_value) != value: raise ArgumentError(u"Type of argument {} is invalid. It should be {}".format(arg, value)) elif arg_value not in value: raise ArgumentError(u"Value of argument {} is invalid. It should be one of {}".format(arg, value)) def check_args(kwargs, required_parameters, optional_parameters={}): args = kwargs.keys() required_args = required_parameters.keys() optional_args = optional_parameters.keys() missing_args = list(set(required_args) - set(args)) if len(missing_args) > 0: raise ArgumentError(u"Parameter {} is required".format(missing_args)) for arg_name, arg_value in kwargs.items(): if arg_name in optional_args: optional_value = optional_parameters[arg_name] check_values(optional_value, arg_name, arg_value) elif arg_name in required_args: required_value = required_parameters[arg_name] check_values(required_value, arg_name, arg_value) class TradeApi(Base): def __init__(self, identifier=None, secret=None): self.id = identifier self.secret = secret self.path = "/tapi/v3/" self.available_pairs = ["BRLBTC", "BRLLTC", "BRLBCH", "BRLXRP", "BRLETH", "BRLUSDC", "BRLMBPRK01", "BRLMBPRK02", "BRLMBPRK03", "BRLMBPRK04", "BRLMBCONS01"] Base.__init__(self) def list_system_messages(self, level="INFO"): """https://www.mercadobitcoin.com.br/trade-api/#list_system_messages""" payload = { "level": level } check_args(payload, { "level": ["INFO", "WARNING", "ERROR"] }) return self.__check_response(self.__post_tapi("list_system_messages", payload)) def get_account_info(self): """https://www.mercadobitcoin.com.br/trade-api/#get_account_info""" return self.__check_response(self.__post_tapi("get_account_info")) def get_order(self, **kwargs): 
"""https://www.mercadobitcoin.com.br/trade-api/#get_order""" check_args(kwargs, { "coin_pair": self.available_pairs, "order_id": int }) return self.__check_response(self.__post_tapi("get_order", kwargs)) def list_orders(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#list_orders""" check_args(kwargs, { "coin_pair": self.available_pairs }, { "order_type": [1, 2], "status_list": str, "has_fills": [True, False], "from_id": int, "to_id": int, "from_timestamp": str, "to_timestamp": str }) return self.__check_response(self.__post_tapi("list_orders", kwargs )) def list_orderbook(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#list_orderbook""" check_args(kwargs, { "coin_pair": self.available_pairs }, { "full": [True, False] }) return self.__check_response(self.__post_tapi("list_orderbook", kwargs )) def place_buy_order(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#place_buy_order""" check_args(kwargs, { "coin_pair": self.available_pairs, "quantity": str, "limit_price": str }) return self.__check_response(self.__post_tapi("place_buy_order", kwargs )) def place_sell_order(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#place_sell_order""" check_args(kwargs, { "coin_pair": self.available_pairs, "quantity": str, "limit_price": str }) return self.__check_response(self.__post_tapi("place_sell_order", kwargs )) def cancel_order(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#cancel_order""" check_args(kwargs, { "coin_pair": self.available_pairs, "order_id": int }) return self.__check_response(self.__post_tapi("cancel_order", kwargs )) def get_withdrawal(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#get_withdrawal""" check_args(kwargs, { "coin": self.available_pairs, "withdrawal_id": int }) return self.__check_response(self.__post_tapi("get_withdrawal", kwargs )) def withdraw_coin_brl(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#withdraw_coin""" 
check_args(kwargs, { "coin": ["BRL"], "quantity": str, "account_ref": str }, { "description": str }) return self.__check_response(self.__post_tapi("withdraw_coin", kwargs )) def withdraw_coin(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#withdraw_coin""" check_args(kwargs, { "coin": ["BTC", "LTC", "BCH", "ETH"], "quantity": str, "address": str, "tx_fee": str }, { "description": str }) return self.__check_response(self.__post_tapi("withdraw_coin", kwargs )) def withdraw_coin_xrp(self, **kwargs): """https://www.mercadobitcoin.com.br/trade-api/#withdraw_coin""" check_args(kwargs, { "coin": ["XRP"], "quantity": str, "address": str, "tx_fee": str, "destination_tag": int }, { "description": str }) return self.__check_response(self.__post_tapi("withdraw_coin", kwargs )) def __check_response(self, response): if response["status_code"] == 100: return response["response_data"] else: raise ApiError(response["error_message"], response["status_code"]) def __post_tapi(self, method, params={}): payload = { "tapi_method": method, "tapi_nonce": str(int(time.time()*1000000))} payload.update(params) headers = { "Content-Type": "application/x-www-form-urlencoded", "TAPI-ID": self.id, "TAPI-MAC": self.__signature(payload) } response = requests.post("https://{}{}".format(self.host, self.path), headers=headers, data=payload) return response.json() def __signature(self, payload): signature = hmac.new(self.secret, digestmod=hashlib.sha512) params = self.path + '?' + urlencode(payload) signature.update(params.encode('utf-8')) return signature.hexdigest()
39.099379
213
0.660524
import requests import urllib import time import hashlib import hmac import itertools try: from urllib.parse import urlencode except ImportError: from urllib import urlencode from .api import Base from .errors import ApiError, ArgumentError def check_values(value, arg, arg_value): if type(value) == type: if type(arg_value) != value: raise ArgumentError(u"Type of argument {} is invalid. It should be {}".format(arg, value)) elif arg_value not in value: raise ArgumentError(u"Value of argument {} is invalid. It should be one of {}".format(arg, value)) def check_args(kwargs, required_parameters, optional_parameters={}): args = kwargs.keys() required_args = required_parameters.keys() optional_args = optional_parameters.keys() missing_args = list(set(required_args) - set(args)) if len(missing_args) > 0: raise ArgumentError(u"Parameter {} is required".format(missing_args)) for arg_name, arg_value in kwargs.items(): if arg_name in optional_args: optional_value = optional_parameters[arg_name] check_values(optional_value, arg_name, arg_value) elif arg_name in required_args: required_value = required_parameters[arg_name] check_values(required_value, arg_name, arg_value) class TradeApi(Base): def __init__(self, identifier=None, secret=None): self.id = identifier self.secret = secret self.path = "/tapi/v3/" self.available_pairs = ["BRLBTC", "BRLLTC", "BRLBCH", "BRLXRP", "BRLETH", "BRLUSDC", "BRLMBPRK01", "BRLMBPRK02", "BRLMBPRK03", "BRLMBPRK04", "BRLMBCONS01"] Base.__init__(self) def list_system_messages(self, level="INFO"): payload = { "level": level } check_args(payload, { "level": ["INFO", "WARNING", "ERROR"] }) return self.__check_response(self.__post_tapi("list_system_messages", payload)) def get_account_info(self): return self.__check_response(self.__post_tapi("get_account_info")) def get_order(self, **kwargs): check_args(kwargs, { "coin_pair": self.available_pairs, "order_id": int }) return self.__check_response(self.__post_tapi("get_order", kwargs)) def list_orders(self, 
**kwargs): check_args(kwargs, { "coin_pair": self.available_pairs }, { "order_type": [1, 2], "status_list": str, "has_fills": [True, False], "from_id": int, "to_id": int, "from_timestamp": str, "to_timestamp": str }) return self.__check_response(self.__post_tapi("list_orders", kwargs )) def list_orderbook(self, **kwargs): check_args(kwargs, { "coin_pair": self.available_pairs }, { "full": [True, False] }) return self.__check_response(self.__post_tapi("list_orderbook", kwargs )) def place_buy_order(self, **kwargs): check_args(kwargs, { "coin_pair": self.available_pairs, "quantity": str, "limit_price": str }) return self.__check_response(self.__post_tapi("place_buy_order", kwargs )) def place_sell_order(self, **kwargs): check_args(kwargs, { "coin_pair": self.available_pairs, "quantity": str, "limit_price": str }) return self.__check_response(self.__post_tapi("place_sell_order", kwargs )) def cancel_order(self, **kwargs): check_args(kwargs, { "coin_pair": self.available_pairs, "order_id": int }) return self.__check_response(self.__post_tapi("cancel_order", kwargs )) def get_withdrawal(self, **kwargs): check_args(kwargs, { "coin": self.available_pairs, "withdrawal_id": int }) return self.__check_response(self.__post_tapi("get_withdrawal", kwargs )) def withdraw_coin_brl(self, **kwargs): check_args(kwargs, { "coin": ["BRL"], "quantity": str, "account_ref": str }, { "description": str }) return self.__check_response(self.__post_tapi("withdraw_coin", kwargs )) def withdraw_coin(self, **kwargs): check_args(kwargs, { "coin": ["BTC", "LTC", "BCH", "ETH"], "quantity": str, "address": str, "tx_fee": str }, { "description": str }) return self.__check_response(self.__post_tapi("withdraw_coin", kwargs )) def withdraw_coin_xrp(self, **kwargs): check_args(kwargs, { "coin": ["XRP"], "quantity": str, "address": str, "tx_fee": str, "destination_tag": int }, { "description": str }) return self.__check_response(self.__post_tapi("withdraw_coin", kwargs )) def __check_response(self, 
response): if response["status_code"] == 100: return response["response_data"] else: raise ApiError(response["error_message"], response["status_code"]) def __post_tapi(self, method, params={}): payload = { "tapi_method": method, "tapi_nonce": str(int(time.time()*1000000))} payload.update(params) headers = { "Content-Type": "application/x-www-form-urlencoded", "TAPI-ID": self.id, "TAPI-MAC": self.__signature(payload) } response = requests.post("https://{}{}".format(self.host, self.path), headers=headers, data=payload) return response.json() def __signature(self, payload): signature = hmac.new(self.secret, digestmod=hashlib.sha512) params = self.path + '?' + urlencode(payload) signature.update(params.encode('utf-8')) return signature.hexdigest()
true
true
f703f4a799b82e96118311c0cdc441dd57f0bada
89
py
Python
tests/test_umonitor.py
RuslanSergeev/uMonitor
f1d4f8e5981d436b8405968ce6273edc0ee0b83a
[ "MIT" ]
null
null
null
tests/test_umonitor.py
RuslanSergeev/uMonitor
f1d4f8e5981d436b8405968ce6273edc0ee0b83a
[ "MIT" ]
null
null
null
tests/test_umonitor.py
RuslanSergeev/uMonitor
f1d4f8e5981d436b8405968ce6273edc0ee0b83a
[ "MIT" ]
null
null
null
from umonitor import __version__ def test_version(): assert __version__ == '0.1.5'
14.833333
33
0.719101
from umonitor import __version__ def test_version(): assert __version__ == '0.1.5'
true
true
f703f5954f5053b294d9b57829d8c3ce8017ee49
3,126
py
Python
main10.py
jsievert73/E01a-Control-Structues
46ec89d0e7bcf09312127a29586d746c2d0033fc
[ "MIT" ]
null
null
null
main10.py
jsievert73/E01a-Control-Structues
46ec89d0e7bcf09312127a29586d746c2d0033fc
[ "MIT" ]
null
null
null
main10.py
jsievert73/E01a-Control-Structues
46ec89d0e7bcf09312127a29586d746c2d0033fc
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import sys, utils, random # import the modules we will need utils.check_version((3,7)) # make sure we are running at least Python 3.7 utils.clear() # clear the screen print('Greetings!') # prints out "Greetings!" in the terminal. colors = ['red','orange','yellow','green','blue','violet','purple'] # creates a list of colors which will be saved for future use. play_again = '' # creates a variable called "play_again" that is just a space at the moemnt best_count = sys.maxsize # the biggest number, which makes it so that the first time they play the game, they will get their best guess so far. while (play_again != 'n' and play_again != 'no'): # will repeat the game, as long as the player has not responded negatively to playing again. match_color = random.choice(colors) # the program picks a random color from the list we created earlier so the game is different every time. count = 0 # starts a counter at 0 that will be used to check how many attempts the user had to go through in order to guess the correct color color = '' # creates the variable color, which will soon be replaced by the user's input. while (color != match_color): # will run this loop while the color does not match the randomly selected color color = input("\nWhat is my favorite color? ") #\n is a special code that adds a new line this is also taking an input from the user after printing "What is my favorite color?" in the window. color = color.lower().strip() # this line takes the user's guessed color and strips it of spaces as well as downcasing all letters count += 1 # this adds one to the count variable, tracking that the user just made a guess. if (color == match_color): # checks if the guessed color matches the randomly selected color. print('Correct!') # if so the program will print "Correct!" else: # if the above check does not return true, the program will run what falls under this line. print('Sorry, try again. 
You have guessed {guesses} times.'.format(guesses=count)) # the program prints the text within the quotes while replacing {guesses} with the variable saved in count print('\nYou guessed it in {0} tries!'.format(count)) #the program prints the text within the counts and replaces {0} with the variable stored in count if (count < best_count): # checks if the player had to use less guesses then their best run of this game so far. print('This was your best guess so far!') # if the above check returns true, then the program prints the text within the quotes. best_count = count # if the above check returns true, the current count for this game replaces best_count as the new record. play_again = input("\nWould you like to play again? ").lower().strip() #checks if the player would like to play again, and strips and downcases the input to save as the play_again input print('Thanks for playing!') #once the player has ended the game by responded with "n" or "no" the program prints the text with quotes on this line.
104.2
205
0.712732
import sys, utils, random utils.check_version((3,7)) utils.clear() print('Greetings!') colors = ['red','orange','yellow','green','blue','violet','purple'] play_again = '' best_count = sys.maxsize while (play_again != 'n' and play_again != 'no'): match_color = random.choice(colors) count = 0 color = '' while (color != match_color): # will run this loop while the color does not match the randomly selected color color = input("\nWhat is my favorite color? ") #\n is a special code that adds a new line this is also taking an input from the user after printing "What is my favorite color?" in the window. color = color.lower().strip() # this line takes the user's guessed color and strips it of spaces as well as downcasing all letters count += 1 if (color == match_color): print('Correct!') else: print('Sorry, try again. You have guessed {guesses} times.'.format(guesses=count)) print('\nYou guessed it in {0} tries!'.format(count)) if (count < best_count): print('This was your best guess so far!') best_count = count play_again = input("\nWould you like to play again? ").lower().strip() print('Thanks for playing!')
true
true
f703f682b15e75b60b527c82494bd2fdcf1e44d9
17,612
py
Python
test/functional/test_framework/test_node.py
bitcoinexodus/bitcoinexodus-source
742661b3dc9abce61c05fa1561b7fd9496629866
[ "MIT" ]
null
null
null
test/functional/test_framework/test_node.py
bitcoinexodus/bitcoinexodus-source
742661b3dc9abce61c05fa1561b7fd9496629866
[ "MIT" ]
null
null
null
test/functional/test_framework/test_node.py
bitcoinexodus/bitcoinexodus-source
742661b3dc9abce61c05fa1561b7fd9496629866
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Copyright (c) 2017-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Class for bitcoinexodusd node under test""" import contextlib import decimal import errno from enum import Enum import http.client import json import logging import os import re import subprocess import tempfile import time import urllib.parse from .authproxy import JSONRPCException from .util import ( append_config, delete_cookie_file, get_rpc_proxy, rpc_url, wait_until, p2p_port, ) # For Python 3.4 compatibility JSONDecodeError = getattr(json, "JSONDecodeError", ValueError) BITCOINEXODUSD_PROC_WAIT_TIMEOUT = 60 class FailedToStartError(Exception): """Raised when a node fails to start correctly.""" class ErrorMatch(Enum): FULL_TEXT = 1 FULL_REGEX = 2 PARTIAL_REGEX = 3 class TestNode(): """A class for representing a bitcoinexodusd node under test. This class contains: - state about the node (whether it's running, etc) - a Python subprocess.Popen object representing the running process - an RPC connection to the node - one or more P2P connections to the node To make things easier for the test writer, any unrecognised messages will be dispatched to the RPC connection.""" def __init__(self, i, datadir, *, rpchost, timewait, bitcoinexodusd, bitcoinexodus_cli, mocktime, coverage_dir, extra_conf=None, extra_args=None, use_cli=False): self.index = i self.datadir = datadir self.stdout_dir = os.path.join(self.datadir, "stdout") self.stderr_dir = os.path.join(self.datadir, "stderr") self.rpchost = rpchost self.rpc_timeout = timewait self.binary = bitcoinexodusd self.coverage_dir = coverage_dir if extra_conf != None: append_config(datadir, extra_conf) # Most callers will just need to add extra args to the standard list below. # For those callers that need more flexibility, they can just set the args property directly. 
# Note that common args are set in the config file (see initialize_datadir) self.extra_args = extra_args self.args = [ self.binary, "-datadir=" + self.datadir, "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i ] self.cli = TestNodeCLI(bitcoinexodus_cli, self.datadir) self.use_cli = use_cli self.running = False self.process = None self.rpc_connected = False self.rpc = None self.url = None self.log = logging.getLogger('TestFramework.node%d' % i) self.cleanup_on_exit = True # Whether to kill the node when this object goes away self.p2ps = [] def get_deterministic_priv_key(self): """Return a deterministic priv key in base58, that only depends on the node's index""" PRIV_KEYS = [ # adress , privkey ('mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z', 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'), ('msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg', 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'), ('mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP', 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'), ('mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR', 'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'), ('msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws', 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'), ('n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi', 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'), ('myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6', 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'), ('mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8', 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'), ('mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg', 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'), ] return PRIV_KEYS[self.index] def _node_msg(self, msg: str) -> str: """Return a modified msg that identifies this node by its index as a debugging aid.""" return "[node %d] %s" % (self.index, msg) def _raise_assertion_error(self, msg: str): """Raise an AssertionError with msg modified to identify this node.""" raise 
AssertionError(self._node_msg(msg)) def __del__(self): # Ensure that we don't leave any bitcoinexodusd processes lying around after # the test ends if self.process and self.cleanup_on_exit: # Should only happen on test failure # Avoid using logger, as that may have already been shutdown when # this destructor is called. print(self._node_msg("Cleaning up leftover process")) self.process.kill() def __getattr__(self, name): """Dispatches any unrecognised messages to the RPC connection or a CLI instance.""" if self.use_cli: return getattr(self.cli, name) else: assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection") return getattr(self.rpc, name) def start(self, extra_args=None, *, stdout=None, stderr=None, **kwargs): """Start the node.""" if extra_args is None: extra_args = self.extra_args # Add a new stdout and stderr file each time bitcoinexodusd is started if stderr is None: stderr = tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) if stdout is None: stdout = tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) self.stderr = stderr self.stdout = stdout # Delete any existing cookie file -- if such a file exists (eg due to # unclean shutdown), it will get overwritten anyway by bitcoinexodusd, and # potentially interfere with our attempt to authenticate delete_cookie_file(self.datadir) # add environment variable LIBC_FATAL_STDERR_=1 so that libc errors are written to stderr and not the terminal subp_env = dict(os.environ, LIBC_FATAL_STDERR_="1") self.process = subprocess.Popen(self.args + extra_args, env=subp_env, stdout=stdout, stderr=stderr, **kwargs) self.running = True self.log.debug("bitcoinexodusd started, waiting for RPC to come up") def wait_for_rpc_connection(self): """Sets up an RPC connection to the bitcoinexodusd process. 
Returns False if unable to connect.""" # Poll at a rate of four times per second poll_per_s = 4 for _ in range(poll_per_s * self.rpc_timeout): if self.process.poll() is not None: raise FailedToStartError(self._node_msg( 'bitcoinexodusd exited with status {} during initialization'.format(self.process.returncode))) try: self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir) self.rpc.getblockcount() # If the call to getblockcount() succeeds then the RPC connection is up self.rpc_connected = True self.url = self.rpc.url self.log.debug("RPC successfully started") return except IOError as e: if e.errno != errno.ECONNREFUSED: # Port not yet open? raise # unknown IO error except JSONRPCException as e: # Initialization phase if e.error['code'] != -28: # RPC in warmup? raise # unknown JSON RPC exception except ValueError as e: # cookie file not found and no rpcuser or rpcassword. bitcoinexodusd still starting if "No RPC credentials" not in str(e): raise time.sleep(1.0 / poll_per_s) self._raise_assertion_error("Unable to connect to bitcoinexodusd") def get_wallet_rpc(self, wallet_name): if self.use_cli: return self.cli("-rpcwallet={}".format(wallet_name)) else: assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected") wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name)) return self.rpc / wallet_path def stop_node(self, expected_stderr=''): """Stop the node.""" if not self.running: return self.log.debug("Stopping node") try: self.stop() except http.client.CannotSendRequest: self.log.exception("Unable to stop node.") # Check that stderr is as expected self.stderr.seek(0) stderr = self.stderr.read().decode('utf-8').strip() if stderr != expected_stderr: raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr)) self.stdout.close() self.stderr.close() del self.p2ps[:] def is_node_stopped(self): """Checks whether the node has stopped. 
Returns True if the node has stopped. False otherwise. This method is responsible for freeing resources (self.process).""" if not self.running: return True return_code = self.process.poll() if return_code is None: return False # process has stopped. Assert that it didn't return an error code. assert return_code == 0, self._node_msg( "Node returned non-zero exit code (%d) when stopping" % return_code) self.running = False self.process = None self.rpc_connected = False self.rpc = None self.log.debug("Node stopped") return True def wait_until_stopped(self, timeout=BITCOINEXODUSD_PROC_WAIT_TIMEOUT): wait_until(self.is_node_stopped, timeout=timeout) @contextlib.contextmanager def assert_debug_log(self, expected_msgs): debug_log = os.path.join(self.datadir, 'regtest', 'debug.log') with open(debug_log, encoding='utf-8') as dl: dl.seek(0, 2) prev_size = dl.tell() try: yield finally: with open(debug_log, encoding='utf-8') as dl: dl.seek(prev_size) log = dl.read() print_log = " - " + "\n - ".join(log.splitlines()) for expected_msg in expected_msgs: if re.search(re.escape(expected_msg), log, flags=re.MULTILINE) is None: self._raise_assertion_error('Expected message "{}" does not partially match log:\n\n{}\n\n'.format(expected_msg, print_log)) def assert_start_raises_init_error(self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs): """Attempt to start the node and expect it to raise an error. extra_args: extra arguments to pass through to bitcoinexodusd expected_msg: regex that stderr should match when bitcoinexodusd fails Will throw if bitcoinexodusd starts without an error. 
Will throw if an expected_msg is provided and it does not match bitcoinexodusd's stdout.""" with tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) as log_stderr, \ tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) as log_stdout: try: self.start(extra_args, stdout=log_stdout, stderr=log_stderr, *args, **kwargs) self.wait_for_rpc_connection() self.stop_node() self.wait_until_stopped() except FailedToStartError as e: self.log.debug('bitcoinexodusd failed to start: %s', e) self.running = False self.process = None # Check stderr for expected message if expected_msg is not None: log_stderr.seek(0) stderr = log_stderr.read().decode('utf-8').strip() if match == ErrorMatch.PARTIAL_REGEX: if re.search(expected_msg, stderr, flags=re.MULTILINE) is None: self._raise_assertion_error( 'Expected message "{}" does not partially match stderr:\n"{}"'.format(expected_msg, stderr)) elif match == ErrorMatch.FULL_REGEX: if re.fullmatch(expected_msg, stderr) is None: self._raise_assertion_error( 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr)) elif match == ErrorMatch.FULL_TEXT: if expected_msg != stderr: self._raise_assertion_error( 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr)) else: if expected_msg is None: assert_msg = "bitcoinexodusd should have exited with an error" else: assert_msg = "bitcoinexodusd should have exited with expected error " + expected_msg self._raise_assertion_error(assert_msg) def node_encrypt_wallet(self, passphrase): """"Encrypts the wallet. This causes bitcoinexodusd to shutdown, so this method takes care of cleaning up resources.""" self.encryptwallet(passphrase) self.wait_until_stopped() def add_p2p_connection(self, p2p_conn, *, wait_for_verack=True, **kwargs): """Add a p2p connection to the node. 
This method adds the p2p connection to the self.p2ps list and also returns the connection to the caller.""" if 'dstport' not in kwargs: kwargs['dstport'] = p2p_port(self.index) if 'dstaddr' not in kwargs: kwargs['dstaddr'] = '127.0.0.1' p2p_conn.peer_connect(**kwargs)() self.p2ps.append(p2p_conn) if wait_for_verack: p2p_conn.wait_for_verack() return p2p_conn @property def p2p(self): """Return the first p2p connection Convenience property - most tests only use a single p2p connection to each node, so this saves having to write node.p2ps[0] many times.""" assert self.p2ps, self._node_msg("No p2p connection") return self.p2ps[0] def disconnect_p2ps(self): """Close all p2p connections to the node.""" for p in self.p2ps: p.peer_disconnect() del self.p2ps[:] class TestNodeCLIAttr: def __init__(self, cli, command): self.cli = cli self.command = command def __call__(self, *args, **kwargs): return self.cli.send_cli(self.command, *args, **kwargs) def get_request(self, *args, **kwargs): return lambda: self(*args, **kwargs) class TestNodeCLI(): """Interface to bitcoinexodus-cli for an individual node""" def __init__(self, binary, datadir): self.options = [] self.binary = binary self.datadir = datadir self.input = None self.log = logging.getLogger('TestFramework.bitcoinexoduscli') def __call__(self, *options, input=None): # TestNodeCLI is callable with bitcoinexodus-cli command-line options cli = TestNodeCLI(self.binary, self.datadir) cli.options = [str(o) for o in options] cli.input = input return cli def __getattr__(self, command): return TestNodeCLIAttr(self, command) def batch(self, requests): results = [] for request in requests: try: results.append(dict(result=request())) except JSONRPCException as e: results.append(dict(error=e)) return results def send_cli(self, command=None, *args, **kwargs): """Run bitcoinexodus-cli command. 
Deserializes returned string as python object.""" pos_args = [str(arg).lower() if type(arg) is bool else str(arg) for arg in args] named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()] assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same bitcoinexodus-cli call" p_args = [self.binary, "-datadir=" + self.datadir] + self.options if named_args: p_args += ["-named"] if command is not None: p_args += [command] p_args += pos_args + named_args self.log.debug("Running bitcoinexodus-cli command: %s" % command) process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) cli_stdout, cli_stderr = process.communicate(input=self.input) returncode = process.poll() if returncode: match = re.match(r'error code: ([-0-9]+)\nerror message:\n(.*)', cli_stderr) if match: code, message = match.groups() raise JSONRPCException(dict(code=int(code), message=message)) # Ignore cli_stdout, raise with cli_stderr raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr) try: return json.loads(cli_stdout, parse_float=decimal.Decimal) except JSONDecodeError: return cli_stdout.rstrip("\n")
42.541063
165
0.634397
import contextlib import decimal import errno from enum import Enum import http.client import json import logging import os import re import subprocess import tempfile import time import urllib.parse from .authproxy import JSONRPCException from .util import ( append_config, delete_cookie_file, get_rpc_proxy, rpc_url, wait_until, p2p_port, ) JSONDecodeError = getattr(json, "JSONDecodeError", ValueError) BITCOINEXODUSD_PROC_WAIT_TIMEOUT = 60 class FailedToStartError(Exception): class ErrorMatch(Enum): FULL_TEXT = 1 FULL_REGEX = 2 PARTIAL_REGEX = 3 class TestNode(): def __init__(self, i, datadir, *, rpchost, timewait, bitcoinexodusd, bitcoinexodus_cli, mocktime, coverage_dir, extra_conf=None, extra_args=None, use_cli=False): self.index = i self.datadir = datadir self.stdout_dir = os.path.join(self.datadir, "stdout") self.stderr_dir = os.path.join(self.datadir, "stderr") self.rpchost = rpchost self.rpc_timeout = timewait self.binary = bitcoinexodusd self.coverage_dir = coverage_dir if extra_conf != None: append_config(datadir, extra_conf) self.extra_args = extra_args self.args = [ self.binary, "-datadir=" + self.datadir, "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i ] self.cli = TestNodeCLI(bitcoinexodus_cli, self.datadir) self.use_cli = use_cli self.running = False self.process = None self.rpc_connected = False self.rpc = None self.url = None self.log = logging.getLogger('TestFramework.node%d' % i) self.cleanup_on_exit = True self.p2ps = [] def get_deterministic_priv_key(self): PRIV_KEYS = [ ('mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z', 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'), ('msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg', 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'), ('mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP', 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'), ('mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR', 'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'), 
('msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws', 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'), ('n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi', 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'), ('myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6', 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'), ('mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8', 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'), ('mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg', 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'), ] return PRIV_KEYS[self.index] def _node_msg(self, msg: str) -> str: return "[node %d] %s" % (self.index, msg) def _raise_assertion_error(self, msg: str): raise AssertionError(self._node_msg(msg)) def __del__(self): # the test ends if self.process and self.cleanup_on_exit: # Should only happen on test failure # Avoid using logger, as that may have already been shutdown when # this destructor is called. print(self._node_msg("Cleaning up leftover process")) self.process.kill() def __getattr__(self, name): if self.use_cli: return getattr(self.cli, name) else: assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection") return getattr(self.rpc, name) def start(self, extra_args=None, *, stdout=None, stderr=None, **kwargs): if extra_args is None: extra_args = self.extra_args # Add a new stdout and stderr file each time bitcoinexodusd is started if stderr is None: stderr = tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) if stdout is None: stdout = tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) self.stderr = stderr self.stdout = stdout # Delete any existing cookie file -- if such a file exists (eg due to # unclean shutdown), it will get overwritten anyway by bitcoinexodusd, and # potentially interfere with our attempt to authenticate delete_cookie_file(self.datadir) # add environment variable LIBC_FATAL_STDERR_=1 so that libc errors are written to stderr and not the terminal subp_env = dict(os.environ, LIBC_FATAL_STDERR_="1") self.process 
= subprocess.Popen(self.args + extra_args, env=subp_env, stdout=stdout, stderr=stderr, **kwargs) self.running = True self.log.debug("bitcoinexodusd started, waiting for RPC to come up") def wait_for_rpc_connection(self): # Poll at a rate of four times per second poll_per_s = 4 for _ in range(poll_per_s * self.rpc_timeout): if self.process.poll() is not None: raise FailedToStartError(self._node_msg( 'bitcoinexodusd exited with status {} during initialization'.format(self.process.returncode))) try: self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir) self.rpc.getblockcount() # If the call to getblockcount() succeeds then the RPC connection is up self.rpc_connected = True self.url = self.rpc.url self.log.debug("RPC successfully started") return except IOError as e: if e.errno != errno.ECONNREFUSED: # Port not yet open? raise # unknown IO error except JSONRPCException as e: # Initialization phase if e.error['code'] != -28: # RPC in warmup? raise # unknown JSON RPC exception except ValueError as e: # cookie file not found and no rpcuser or rpcassword. 
bitcoinexodusd still starting if "No RPC credentials" not in str(e): raise time.sleep(1.0 / poll_per_s) self._raise_assertion_error("Unable to connect to bitcoinexodusd") def get_wallet_rpc(self, wallet_name): if self.use_cli: return self.cli("-rpcwallet={}".format(wallet_name)) else: assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected") wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name)) return self.rpc / wallet_path def stop_node(self, expected_stderr=''): if not self.running: return self.log.debug("Stopping node") try: self.stop() except http.client.CannotSendRequest: self.log.exception("Unable to stop node.") # Check that stderr is as expected self.stderr.seek(0) stderr = self.stderr.read().decode('utf-8').strip() if stderr != expected_stderr: raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr)) self.stdout.close() self.stderr.close() del self.p2ps[:] def is_node_stopped(self): if not self.running: return True return_code = self.process.poll() if return_code is None: return False # process has stopped. Assert that it didn't return an error code. 
assert return_code == 0, self._node_msg( "Node returned non-zero exit code (%d) when stopping" % return_code) self.running = False self.process = None self.rpc_connected = False self.rpc = None self.log.debug("Node stopped") return True def wait_until_stopped(self, timeout=BITCOINEXODUSD_PROC_WAIT_TIMEOUT): wait_until(self.is_node_stopped, timeout=timeout) @contextlib.contextmanager def assert_debug_log(self, expected_msgs): debug_log = os.path.join(self.datadir, 'regtest', 'debug.log') with open(debug_log, encoding='utf-8') as dl: dl.seek(0, 2) prev_size = dl.tell() try: yield finally: with open(debug_log, encoding='utf-8') as dl: dl.seek(prev_size) log = dl.read() print_log = " - " + "\n - ".join(log.splitlines()) for expected_msg in expected_msgs: if re.search(re.escape(expected_msg), log, flags=re.MULTILINE) is None: self._raise_assertion_error('Expected message "{}" does not partially match log:\n\n{}\n\n'.format(expected_msg, print_log)) def assert_start_raises_init_error(self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs): with tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) as log_stderr, \ tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) as log_stdout: try: self.start(extra_args, stdout=log_stdout, stderr=log_stderr, *args, **kwargs) self.wait_for_rpc_connection() self.stop_node() self.wait_until_stopped() except FailedToStartError as e: self.log.debug('bitcoinexodusd failed to start: %s', e) self.running = False self.process = None if expected_msg is not None: log_stderr.seek(0) stderr = log_stderr.read().decode('utf-8').strip() if match == ErrorMatch.PARTIAL_REGEX: if re.search(expected_msg, stderr, flags=re.MULTILINE) is None: self._raise_assertion_error( 'Expected message "{}" does not partially match stderr:\n"{}"'.format(expected_msg, stderr)) elif match == ErrorMatch.FULL_REGEX: if re.fullmatch(expected_msg, stderr) is None: self._raise_assertion_error( 'Expected message "{}" does not 
fully match stderr:\n"{}"'.format(expected_msg, stderr)) elif match == ErrorMatch.FULL_TEXT: if expected_msg != stderr: self._raise_assertion_error( 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr)) else: if expected_msg is None: assert_msg = "bitcoinexodusd should have exited with an error" else: assert_msg = "bitcoinexodusd should have exited with expected error " + expected_msg self._raise_assertion_error(assert_msg) def node_encrypt_wallet(self, passphrase): self.encryptwallet(passphrase) self.wait_until_stopped() def add_p2p_connection(self, p2p_conn, *, wait_for_verack=True, **kwargs): if 'dstport' not in kwargs: kwargs['dstport'] = p2p_port(self.index) if 'dstaddr' not in kwargs: kwargs['dstaddr'] = '127.0.0.1' p2p_conn.peer_connect(**kwargs)() self.p2ps.append(p2p_conn) if wait_for_verack: p2p_conn.wait_for_verack() return p2p_conn @property def p2p(self): assert self.p2ps, self._node_msg("No p2p connection") return self.p2ps[0] def disconnect_p2ps(self): for p in self.p2ps: p.peer_disconnect() del self.p2ps[:] class TestNodeCLIAttr: def __init__(self, cli, command): self.cli = cli self.command = command def __call__(self, *args, **kwargs): return self.cli.send_cli(self.command, *args, **kwargs) def get_request(self, *args, **kwargs): return lambda: self(*args, **kwargs) class TestNodeCLI(): def __init__(self, binary, datadir): self.options = [] self.binary = binary self.datadir = datadir self.input = None self.log = logging.getLogger('TestFramework.bitcoinexoduscli') def __call__(self, *options, input=None): cli = TestNodeCLI(self.binary, self.datadir) cli.options = [str(o) for o in options] cli.input = input return cli def __getattr__(self, command): return TestNodeCLIAttr(self, command) def batch(self, requests): results = [] for request in requests: try: results.append(dict(result=request())) except JSONRPCException as e: results.append(dict(error=e)) return results def send_cli(self, command=None, *args, 
**kwargs): pos_args = [str(arg).lower() if type(arg) is bool else str(arg) for arg in args] named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()] assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same bitcoinexodus-cli call" p_args = [self.binary, "-datadir=" + self.datadir] + self.options if named_args: p_args += ["-named"] if command is not None: p_args += [command] p_args += pos_args + named_args self.log.debug("Running bitcoinexodus-cli command: %s" % command) process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) cli_stdout, cli_stderr = process.communicate(input=self.input) returncode = process.poll() if returncode: match = re.match(r'error code: ([-0-9]+)\nerror message:\n(.*)', cli_stderr) if match: code, message = match.groups() raise JSONRPCException(dict(code=int(code), message=message)) raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr) try: return json.loads(cli_stdout, parse_float=decimal.Decimal) except JSONDecodeError: return cli_stdout.rstrip("\n")
true
true
f703f846d570efe5c6dfdbe6dbeed9ff291746db
10,554
py
Python
skimage/exposure/exposure.py
neurodebian/scikits.image-1
33206f87c5e0208e7ff0d5910ac082b3353fe04e
[ "BSD-3-Clause" ]
null
null
null
skimage/exposure/exposure.py
neurodebian/scikits.image-1
33206f87c5e0208e7ff0d5910ac082b3353fe04e
[ "BSD-3-Clause" ]
null
null
null
skimage/exposure/exposure.py
neurodebian/scikits.image-1
33206f87c5e0208e7ff0d5910ac082b3353fe04e
[ "BSD-3-Clause" ]
null
null
null
import warnings import numpy as np from skimage import img_as_float from skimage.util.dtype import dtype_range, dtype_limits from skimage._shared.utils import deprecated __all__ = ['histogram', 'cumulative_distribution', 'equalize', 'rescale_intensity', 'adjust_gamma', 'adjust_log', 'adjust_sigmoid'] def histogram(image, nbins=256): """Return histogram of image. Unlike `numpy.histogram`, this function returns the centers of bins and does not rebin integer arrays. For integer arrays, each integer value has its own bin, which improves speed and intensity-resolution. The histogram is computed on the flattened image: for color images, the function should be used separately on each channel to obtain a histogram for each color channel. Parameters ---------- image : array Input image. nbins : int Number of bins used to calculate histogram. This value is ignored for integer arrays. Returns ------- hist : array The values of the histogram. bin_centers : array The values at the center of the bins. Examples -------- >>> from skimage import data, exposure, util >>> image = util.img_as_float(data.camera()) >>> np.histogram(image, bins=2) (array([107432, 154712]), array([ 0. , 0.5, 1. ])) >>> exposure.histogram(image, nbins=2) (array([107432, 154712]), array([ 0.25, 0.75])) """ sh = image.shape if len(sh) == 3 and sh[-1] < 4: warnings.warn("This might be a color image. The histogram will be " "computed on the flattened image. You can instead " "apply this function to each color channel.") # For integer types, histogramming with bincount is more efficient. if np.issubdtype(image.dtype, np.integer): offset = 0 if np.min(image) < 0: offset = np.min(image) hist = np.bincount(image.ravel() - offset) bin_centers = np.arange(len(hist)) + offset # clip histogram to start with a non-zero bin idx = np.nonzero(hist)[0][0] return hist[idx:], bin_centers[idx:] else: hist, bin_edges = np.histogram(image.flat, nbins) bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2. 
return hist, bin_centers def cumulative_distribution(image, nbins=256): """Return cumulative distribution function (cdf) for the given image. Parameters ---------- image : array Image array. nbins : int Number of bins for image histogram. Returns ------- img_cdf : array Values of cumulative distribution function. bin_centers : array Centers of bins. References ---------- .. [1] http://en.wikipedia.org/wiki/Cumulative_distribution_function """ hist, bin_centers = histogram(image, nbins) img_cdf = hist.cumsum() img_cdf = img_cdf / float(img_cdf[-1]) return img_cdf, bin_centers @deprecated('equalize_hist') def equalize(image, nbins=256): return equalize_hist(image, nbins) def equalize_hist(image, nbins=256): """Return image after histogram equalization. Parameters ---------- image : array Image array. nbins : int Number of bins for image histogram. Returns ------- out : float array Image array after histogram equalization. Notes ----- This function is adapted from [1]_ with the author's permission. References ---------- .. [1] http://www.janeriksolem.net/2009/06/histogram-equalization-with-python-and.html .. [2] http://en.wikipedia.org/wiki/Histogram_equalization """ image = img_as_float(image) cdf, bin_centers = cumulative_distribution(image, nbins) out = np.interp(image.flat, bin_centers, cdf) return out.reshape(image.shape) def rescale_intensity(image, in_range=None, out_range=None): """Return image after stretching or shrinking its intensity levels. The image intensities are uniformly rescaled such that the minimum and maximum values given by `in_range` match those given by `out_range`. Parameters ---------- image : array Image array. in_range : 2-tuple (float, float) Min and max *allowed* intensity values of input image. If None, the *allowed* min/max values are set to the *actual* min/max values in the input image. out_range : 2-tuple (float, float) Min and max intensity values of output image. If None, use the min/max intensities of the image data type. 
See `skimage.util.dtype` for details. Returns ------- out : array Image array after rescaling its intensity. This image is the same dtype as the input image. Examples -------- By default, intensities are stretched to the limits allowed by the dtype: >>> image = np.array([51, 102, 153], dtype=np.uint8) >>> rescale_intensity(image) array([ 0, 127, 255], dtype=uint8) It's easy to accidentally convert an image dtype from uint8 to float: >>> 1.0 * image array([ 51., 102., 153.]) Use `rescale_intensity` to rescale to the proper range for float dtypes: >>> image_float = 1.0 * image >>> rescale_intensity(image_float) array([ 0. , 0.5, 1. ]) To maintain the low contrast of the original, use the `in_range` parameter: >>> rescale_intensity(image_float, in_range=(0, 255)) array([ 0.2, 0.4, 0.6]) If the min/max value of `in_range` is more/less than the min/max image intensity, then the intensity levels are clipped: >>> rescale_intensity(image_float, in_range=(0, 102)) array([ 0.5, 1. , 1. ]) If you have an image with signed integers but want to rescale the image to just the positive range, use the `out_range` parameter: >>> image = np.array([-10, 0, 10], dtype=np.int8) >>> rescale_intensity(image, out_range=(0, 127)) array([ 0, 63, 127], dtype=int8) """ dtype = image.dtype.type if in_range is None: imin = np.min(image) imax = np.max(image) else: imin, imax = in_range if out_range is None: omin, omax = dtype_range[dtype] if imin >= 0: omin = 0 else: omin, omax = out_range image = np.clip(image, imin, imax) image = (image - imin) / float(imax - imin) return dtype(image * (omax - omin) + omin) def _assert_non_negative(image): if np.any(image < 0): raise ValueError('Image Correction methods work correctly only on ' 'images with non-negative values. Use ' 'skimage.exposure.rescale_intensity.') def adjust_gamma(image, gamma=1, gain=1): """Performs Gamma Correction on the input image. Also known as Power Law Transform. 
This function transforms the input image pixelwise according to the equation ``O = I**gamma`` after scaling each pixel to the range 0 to 1. Parameters ---------- image : ndarray Input image. gamma : float Non negative real number. Default value is 1. gain : float The constant multiplier. Default value is 1. Returns ------- out : ndarray Gamma corrected output image. Notes ----- For gamma greater than 1, the histogram will shift towards left and the output image will be darker than the input image. For gamma less than 1, the histogram will shift towards right and the output image will be brighter than the input image. References ---------- .. [1] http://en.wikipedia.org/wiki/Gamma_correction """ _assert_non_negative(image) dtype = image.dtype.type if gamma < 0: return "Gamma should be a non-negative real number" scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0]) out = ((image / scale) ** gamma) * scale * gain return dtype(out) def adjust_log(image, gain=1, inv=False): """Performs Logarithmic correction on the input image. This function transforms the input image pixelwise according to the equation ``O = gain*log(1 + I)`` after scaling each pixel to the range 0 to 1. For inverse logarithmic correction, the equation is ``O = gain*(2**I - 1)``. Parameters ---------- image : ndarray Input image. gain : float The constant multiplier. Default value is 1. inv : float If True, it performs inverse logarithmic correction, else correction will be logarithmic. Defaults to False. Returns ------- out : ndarray Logarithm corrected output image. References ---------- .. 
[1] http://www.ece.ucsb.edu/Faculty/Manjunath/courses/ece178W03/EnhancePart1.pdf """ _assert_non_negative(image) dtype = image.dtype.type scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0]) if inv: out = (2 ** (image / scale) - 1) * scale * gain return dtype(out) out = np.log2(1 + image / scale) * scale * gain return dtype(out) def adjust_sigmoid(image, cutoff=0.5, gain=10, inv=False): """Performs Sigmoid Correction on the input image. Also known as Contrast Adjustment. This function transforms the input image pixelwise according to the equation ``O = 1/(1 + exp*(gain*(cutoff - I)))`` after scaling each pixel to the range 0 to 1. Parameters ---------- image : ndarray Input image. cutoff : float Cutoff of the sigmoid function that shifts the characteristic curve in horizontal direction. Default value is 0.5. gain : float The constant multiplier in exponential's power of sigmoid function. Default value is 10. inv : bool If True, returns the negative sigmoid correction. Defaults to False. Returns ------- out : ndarray Sigmoid corrected output image. References ---------- .. [1] Gustav J. Braun, "Image Lightness Rescaling Using Sigmoidal Contrast Enhancement Functions", http://www.cis.rit.edu/fairchild/PDFs/PAP07.pdf """ _assert_non_negative(image) dtype = image.dtype.type scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0]) if inv: out = (1 - 1 / (1 + np.exp(gain * (cutoff - image/scale)))) * scale return dtype(out) out = (1 / (1 + np.exp(gain * (cutoff - image/scale)))) * scale return dtype(out)
29.646067
90
0.631325
import warnings import numpy as np from skimage import img_as_float from skimage.util.dtype import dtype_range, dtype_limits from skimage._shared.utils import deprecated __all__ = ['histogram', 'cumulative_distribution', 'equalize', 'rescale_intensity', 'adjust_gamma', 'adjust_log', 'adjust_sigmoid'] def histogram(image, nbins=256): sh = image.shape if len(sh) == 3 and sh[-1] < 4: warnings.warn("This might be a color image. The histogram will be " "computed on the flattened image. You can instead " "apply this function to each color channel.") if np.issubdtype(image.dtype, np.integer): offset = 0 if np.min(image) < 0: offset = np.min(image) hist = np.bincount(image.ravel() - offset) bin_centers = np.arange(len(hist)) + offset idx = np.nonzero(hist)[0][0] return hist[idx:], bin_centers[idx:] else: hist, bin_edges = np.histogram(image.flat, nbins) bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2. return hist, bin_centers def cumulative_distribution(image, nbins=256): hist, bin_centers = histogram(image, nbins) img_cdf = hist.cumsum() img_cdf = img_cdf / float(img_cdf[-1]) return img_cdf, bin_centers @deprecated('equalize_hist') def equalize(image, nbins=256): return equalize_hist(image, nbins) def equalize_hist(image, nbins=256): image = img_as_float(image) cdf, bin_centers = cumulative_distribution(image, nbins) out = np.interp(image.flat, bin_centers, cdf) return out.reshape(image.shape) def rescale_intensity(image, in_range=None, out_range=None): dtype = image.dtype.type if in_range is None: imin = np.min(image) imax = np.max(image) else: imin, imax = in_range if out_range is None: omin, omax = dtype_range[dtype] if imin >= 0: omin = 0 else: omin, omax = out_range image = np.clip(image, imin, imax) image = (image - imin) / float(imax - imin) return dtype(image * (omax - omin) + omin) def _assert_non_negative(image): if np.any(image < 0): raise ValueError('Image Correction methods work correctly only on ' 'images with non-negative values. 
Use ' 'skimage.exposure.rescale_intensity.') def adjust_gamma(image, gamma=1, gain=1): _assert_non_negative(image) dtype = image.dtype.type if gamma < 0: return "Gamma should be a non-negative real number" scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0]) out = ((image / scale) ** gamma) * scale * gain return dtype(out) def adjust_log(image, gain=1, inv=False): _assert_non_negative(image) dtype = image.dtype.type scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0]) if inv: out = (2 ** (image / scale) - 1) * scale * gain return dtype(out) out = np.log2(1 + image / scale) * scale * gain return dtype(out) def adjust_sigmoid(image, cutoff=0.5, gain=10, inv=False): _assert_non_negative(image) dtype = image.dtype.type scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0]) if inv: out = (1 - 1 / (1 + np.exp(gain * (cutoff - image/scale)))) * scale return dtype(out) out = (1 / (1 + np.exp(gain * (cutoff - image/scale)))) * scale return dtype(out)
true
true
f703f87ff915b3932ce7f6187f17cfb3996faefe
8,468
py
Python
tests/sources/tools/perception/object_tracking_2d/deep_sort/test_object_tracking_2d_deep_sort.py
daoran/opendr
bca25f6a43244fe9c219a24576181f94a0726923
[ "Apache-2.0" ]
null
null
null
tests/sources/tools/perception/object_tracking_2d/deep_sort/test_object_tracking_2d_deep_sort.py
daoran/opendr
bca25f6a43244fe9c219a24576181f94a0726923
[ "Apache-2.0" ]
null
null
null
tests/sources/tools/perception/object_tracking_2d/deep_sort/test_object_tracking_2d_deep_sort.py
daoran/opendr
bca25f6a43244fe9c219a24576181f94a0726923
[ "Apache-2.0" ]
null
null
null
# Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import unittest import shutil import torch from opendr.perception.object_tracking_2d import ObjectTracking2DDeepSortLearner from opendr.perception.object_tracking_2d import ( Market1501Dataset, Market1501DatasetIterator, ) from opendr.perception.object_tracking_2d import ( MotDataset, RawMotWithDetectionsDatasetIterator, ) import os DEVICE = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu' print("Using device:", DEVICE) print("Using device:", DEVICE, file=sys.stderr) def rmfile(path): try: os.remove(path) except OSError as e: print("Error: %s - %s." % (e.filename, e.strerror)) def rmdir(_dir): try: shutil.rmtree(_dir) except OSError as e: print("Error: %s - %s." 
% (e.filename, e.strerror)) class TestObjectTracking2DDeepSortLearner(unittest.TestCase): @classmethod def setUpClass(cls): cls.temp_dir = os.path.join("tests", "sources", "tools", "perception", "object_tracking_2d", "deep_sort", "deep_sort_temp") cls.train_split_paths = { "nano_mot20": os.path.join( ".", "src", "opendr", "perception", "object_tracking_2d", "datasets", "splits", "nano_mot20.train" ) } cls.model_names = [ "deep_sort", ] cls.mot_dataset_path = MotDataset.download_nano_mot20( os.path.join(cls.temp_dir, "mot_dataset"), True ).path cls.market1501_dataset_path = Market1501Dataset.download_nano_market1501( os.path.join(cls.temp_dir, "market1501_dataset"), True ).path print("Dataset downloaded", file=sys.stderr) for model_name in cls.model_names: ObjectTracking2DDeepSortLearner.download( model_name, cls.temp_dir ) print("Models downloaded", file=sys.stderr) @classmethod def tearDownClass(cls): # Clean up downloaded files rmdir(os.path.join(cls.temp_dir)) def test_fit(self): def test_model(name): dataset = Market1501Dataset(self.market1501_dataset_path) learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) starting_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() learner.fit( dataset, epochs=2, val_epochs=2, verbose=True, ) new_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() self.assertFalse(torch.equal(starting_param, new_param)) print("Fit", name, "ok", file=sys.stderr) for name in self.model_names: test_model(name) def test_fit_iterator(self): def test_model(name): dataset = Market1501DatasetIterator( os.path.join(self.market1501_dataset_path, "bounding_box_train"), ) eval_dataset = Market1501DatasetIterator( os.path.join(self.market1501_dataset_path, "bounding_box_test"), ) learner = ObjectTracking2DDeepSortLearner( checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, ) starting_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() 
learner.fit( dataset, epochs=2, val_dataset=eval_dataset, val_epochs=2, verbose=True, ) new_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() self.assertFalse(torch.equal(starting_param, new_param)) print("Fit iterator", name, "ok", file=sys.stderr) for name in self.model_names: test_model(name) def test_eval(self): def test_model(name): model_path = os.path.join(self.temp_dir, name) train_split_paths = { "nano_mot20": os.path.join( ".", "src", "opendr", "perception", "object_tracking_2d", "datasets", "splits", "nano_mot20.train" ) } dataset = RawMotWithDetectionsDatasetIterator( self.mot_dataset_path, train_split_paths ) learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner.load(model_path, verbose=True) result = learner.eval(dataset) self.assertGreater(len(result["mota"]), 0) for name in self.model_names: test_model(name) def test_infer(self): def test_model(name): model_path = os.path.join(self.temp_dir, name) train_split_paths = { "nano_mot20": os.path.join( ".", "src", "opendr", "perception", "object_tracking_2d", "datasets", "splits", "nano_mot20.train" ) } dataset = RawMotWithDetectionsDatasetIterator( self.mot_dataset_path, train_split_paths ) learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner.load(model_path, verbose=True) result = learner.infer(dataset[0][0], 1) self.assertTrue(len(result) > 0) learner.reset() result = learner.infer([ dataset[0][0], dataset[1][0], ]) self.assertTrue(len(result) == 2) self.assertTrue(len(result[0]) > 0) for name in self.model_names: test_model(name) def test_save(self): def test_model(name): model_path = os.path.join(self.temp_dir, "test_save_" + name) save_path = os.path.join(model_path, "save") learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner.save(save_path, True) starting_param_1 = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() learner2 = 
ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner2.load(save_path) new_param = list(learner2.tracker.deepsort.extractor.net.parameters())[0].clone() self.assertTrue(torch.equal(starting_param_1, new_param)) for name in self.model_names: test_model(name) def test_optimize(self): def test_model(name): model_path = os.path.join(self.temp_dir, name) train_split_paths = { "nano_mot20": os.path.join( ".", "src", "opendr", "perception", "object_tracking_2d", "datasets", "splits", "nano_mot20.train" ) } dataset = RawMotWithDetectionsDatasetIterator( self.mot_dataset_path, train_split_paths ) learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner.load(model_path, verbose=True) learner.optimize() result = learner.eval(dataset) self.assertGreater(len(result["mota"]), 0) for name in self.model_names: test_model(name) if __name__ == "__main__": unittest.main()
31.479554
99
0.571682
import sys import unittest import shutil import torch from opendr.perception.object_tracking_2d import ObjectTracking2DDeepSortLearner from opendr.perception.object_tracking_2d import ( Market1501Dataset, Market1501DatasetIterator, ) from opendr.perception.object_tracking_2d import ( MotDataset, RawMotWithDetectionsDatasetIterator, ) import os DEVICE = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu' print("Using device:", DEVICE) print("Using device:", DEVICE, file=sys.stderr) def rmfile(path): try: os.remove(path) except OSError as e: print("Error: %s - %s." % (e.filename, e.strerror)) def rmdir(_dir): try: shutil.rmtree(_dir) except OSError as e: print("Error: %s - %s." % (e.filename, e.strerror)) class TestObjectTracking2DDeepSortLearner(unittest.TestCase): @classmethod def setUpClass(cls): cls.temp_dir = os.path.join("tests", "sources", "tools", "perception", "object_tracking_2d", "deep_sort", "deep_sort_temp") cls.train_split_paths = { "nano_mot20": os.path.join( ".", "src", "opendr", "perception", "object_tracking_2d", "datasets", "splits", "nano_mot20.train" ) } cls.model_names = [ "deep_sort", ] cls.mot_dataset_path = MotDataset.download_nano_mot20( os.path.join(cls.temp_dir, "mot_dataset"), True ).path cls.market1501_dataset_path = Market1501Dataset.download_nano_market1501( os.path.join(cls.temp_dir, "market1501_dataset"), True ).path print("Dataset downloaded", file=sys.stderr) for model_name in cls.model_names: ObjectTracking2DDeepSortLearner.download( model_name, cls.temp_dir ) print("Models downloaded", file=sys.stderr) @classmethod def tearDownClass(cls): rmdir(os.path.join(cls.temp_dir)) def test_fit(self): def test_model(name): dataset = Market1501Dataset(self.market1501_dataset_path) learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) starting_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() learner.fit( dataset, epochs=2, val_epochs=2, verbose=True, ) new_param = 
list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() self.assertFalse(torch.equal(starting_param, new_param)) print("Fit", name, "ok", file=sys.stderr) for name in self.model_names: test_model(name) def test_fit_iterator(self): def test_model(name): dataset = Market1501DatasetIterator( os.path.join(self.market1501_dataset_path, "bounding_box_train"), ) eval_dataset = Market1501DatasetIterator( os.path.join(self.market1501_dataset_path, "bounding_box_test"), ) learner = ObjectTracking2DDeepSortLearner( checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, ) starting_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() learner.fit( dataset, epochs=2, val_dataset=eval_dataset, val_epochs=2, verbose=True, ) new_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() self.assertFalse(torch.equal(starting_param, new_param)) print("Fit iterator", name, "ok", file=sys.stderr) for name in self.model_names: test_model(name) def test_eval(self): def test_model(name): model_path = os.path.join(self.temp_dir, name) train_split_paths = { "nano_mot20": os.path.join( ".", "src", "opendr", "perception", "object_tracking_2d", "datasets", "splits", "nano_mot20.train" ) } dataset = RawMotWithDetectionsDatasetIterator( self.mot_dataset_path, train_split_paths ) learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner.load(model_path, verbose=True) result = learner.eval(dataset) self.assertGreater(len(result["mota"]), 0) for name in self.model_names: test_model(name) def test_infer(self): def test_model(name): model_path = os.path.join(self.temp_dir, name) train_split_paths = { "nano_mot20": os.path.join( ".", "src", "opendr", "perception", "object_tracking_2d", "datasets", "splits", "nano_mot20.train" ) } dataset = RawMotWithDetectionsDatasetIterator( self.mot_dataset_path, train_split_paths ) learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) 
learner.load(model_path, verbose=True) result = learner.infer(dataset[0][0], 1) self.assertTrue(len(result) > 0) learner.reset() result = learner.infer([ dataset[0][0], dataset[1][0], ]) self.assertTrue(len(result) == 2) self.assertTrue(len(result[0]) > 0) for name in self.model_names: test_model(name) def test_save(self): def test_model(name): model_path = os.path.join(self.temp_dir, "test_save_" + name) save_path = os.path.join(model_path, "save") learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner.save(save_path, True) starting_param_1 = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone() learner2 = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner2.load(save_path) new_param = list(learner2.tracker.deepsort.extractor.net.parameters())[0].clone() self.assertTrue(torch.equal(starting_param_1, new_param)) for name in self.model_names: test_model(name) def test_optimize(self): def test_model(name): model_path = os.path.join(self.temp_dir, name) train_split_paths = { "nano_mot20": os.path.join( ".", "src", "opendr", "perception", "object_tracking_2d", "datasets", "splits", "nano_mot20.train" ) } dataset = RawMotWithDetectionsDatasetIterator( self.mot_dataset_path, train_split_paths ) learner = ObjectTracking2DDeepSortLearner( temp_path=self.temp_dir, device=DEVICE, ) learner.load(model_path, verbose=True) learner.optimize() result = learner.eval(dataset) self.assertGreater(len(result["mota"]), 0) for name in self.model_names: test_model(name) if __name__ == "__main__": unittest.main()
true
true
f703f900fdeb2d460fabac0b14ae0aab32185ff9
95,093
py
Python
ibflex/Types.py
tobigs/ibflex
1d2f9e99a40db6c8bc561a35899a62246e98edf0
[ "MIT" ]
null
null
null
ibflex/Types.py
tobigs/ibflex
1d2f9e99a40db6c8bc561a35899a62246e98edf0
[ "MIT" ]
null
null
null
ibflex/Types.py
tobigs/ibflex
1d2f9e99a40db6c8bc561a35899a62246e98edf0
[ "MIT" ]
null
null
null
# coding: utf-8
"""Python data types for IB Flex format XML data.

These class definitions are introspected by ibflex.parser to type-convert
IB data.  They're dataclasses, made immutable by passing `frozen=True` to the
class decorator.  Class attributes are annotated with PEP 484 type hints.

Except for the top-level XML elements, i.e. <FlexQueryResponse>,
<FlexStatements>, and <FlexStatement>, the Flex format cleanly differentiates
between data-bearing elements and container elements.  Data elements hold
their values in XML element attributes; container elements are sequences
of child elements (usually data elements, but sometimes other containers).

XML element attributes are represented by class attributes hinted with the
Python type to which their values should be converted.  Almost all are marked
`Optional`, since Flex report configuration allows any of them to be included
or omitted individually.  Default value is `None` for a single value, or an
empty tuple for a sequence.

Specifically defined enums are an exception; the parser handles missing values
for them, so you shouldn't specify a default value.  The enums therefore need
to come first in the class definition to avoid offending dataclass.

Some data elements have XML attributes whose values are sequences delimited by
commas or semicolons.  These are represented by class attributes hinted as a
variable-length `Tuple` of their sequence item type (`str` or an Enum type).

XML container elements are represented as variable-length `Tuple` of contained
child type.

TODO - need types for:
FdicInsuredDepositsByBank
ComplexPositions
HKIPOSubscriptionActivity
PendingExcercises
FxTransactions
UnbookedTrades
RoutingCommissions
IBGNoteTransactions
Adjustments
SoftDollars
CFDCharges
SLBOpenContracts
HKIPOOpenSubscriptions
"""
# PEP 563 compliance
# https://www.python.org/dev/peps/pep-0563/#resolving-type-hints-at-runtime
from __future__ import annotations

__all__ = [
    "FlexElement",
    "FlexQueryResponse",
    "FlexStatement",
    "AccountInformation",
    "ChangeInNAV",
    "MTMPerformanceSummaryUnderlying",
    "EquitySummaryByReportDateInBase",
    "MTDYTDPerformanceSummaryUnderlying",
    "CashReportCurrency",
    "FIFOPerformanceSummaryUnderlying",
    "NetStockPosition",
    "UnsettledTransfer",
    "UnbundledCommissionDetail",
    "StatementOfFundsLine",
    "ChangeInPositionValue",
    "OpenPosition",
    "FxLot",
    "Trade",
    "TradeConfirm",
    "OptionEAE",
    "TradeTransfer",
    "TierInterestDetail",
    "HardToBorrowDetail",
    "InterestAccrualsCurrency",
    "SLBActivity",
    "Transfer",
    "CorporateAction",
    "CashTransaction",
    "ChangeInDividendAccrual",
    "OpenDividendAccrual",
    "SecurityInfo",
    "ConversionRate",
    "PriorPeriodPosition",
    "ClientFee",
    "ClientFeesDetail",
    "SalesTax",
    "DebitCardActivity",
    "SymbolSummary",
    "Order"
]

import datetime
import decimal
from dataclasses import dataclass, astuple
from typing import Tuple, Optional

from ibflex import enums


@dataclass(frozen=True)
class FlexElement:
    """ Base class for data element types """
    def __iter__(self):
        # Iterate field *values* in declaration order (tuple-like protocol).
        return iter(astuple(self))

    def items(self):
        # Yield (fieldName, value) pairs, like dict.items().
        for attr, val in self.__dict__.items():
            yield attr, val


@dataclass(frozen=True)
class FlexQueryResponse(FlexElement):
    """ Root element """
    queryName: str
    type: str
    FlexStatements: Tuple["FlexStatement", ...]

    def __repr__(self):
        # Summarize the (potentially large) statement sequence by length
        # instead of dumping every child.
        repr = (
            f"{type(self).__name__}("
            f"queryName={self.queryName!r}, "
            f"type={self.type!r}, "
            f"len(FlexStatements)={len(self.FlexStatements)}"
            ")"
        )
        return repr


@dataclass(frozen=True)
class FlexStatement(FlexElement):
    """ Wrapped in <FlexStatements> """
    accountId: str
    fromDate: datetime.date
    toDate: datetime.date
    period: str
    whenGenerated: datetime.datetime
    AccountInformation: Optional["_AccountInformation"] = None
    ChangeInNAV: Optional["_ChangeInNAV"] = None
    CashReport: Tuple["CashReportCurrency", ...] = ()
    MTDYTDPerformanceSummary: Tuple["MTDYTDPerformanceSummaryUnderlying", ...] = ()
    MTMPerformanceSummaryInBase: Tuple["MTMPerformanceSummaryUnderlying", ...] = ()
    EquitySummaryInBase: Tuple["EquitySummaryByReportDateInBase", ...] = ()
    FIFOPerformanceSummaryInBase: Tuple["FIFOPerformanceSummaryUnderlying", ...] = ()
    FdicInsuredDepositsByBank: Tuple = ()  # TODO
    StmtFunds: Tuple["StatementOfFundsLine", ...] = ()
    ChangeInPositionValues: Tuple["ChangeInPositionValue", ...] = ()
    OpenPositions: Tuple["OpenPosition", ...] = ()
    NetStockPositionSummary: Tuple["NetStockPosition", ...] = ()
    ComplexPositions: Tuple = ()  # TODO
    FxPositions: Tuple["FxLot", ...] = ()  # N.B. FXLot wrapped in FxLots
    Trades: Tuple["Trade", ...] = ()
    HKIPOSubscriptionActivity: Tuple = ()  # TODO
    TradeConfirms: Tuple["TradeConfirm", ...] = ()
    TransactionTaxes: Tuple = ()
    OptionEAE: Tuple["_OptionEAE", ...] = ()
    # Not a typo - they really spell it "Excercises"
    PendingExcercises: Tuple = ()  # TODO
    TradeTransfers: Tuple["TradeTransfer", ...] = ()
    FxTransactions: Tuple = ()  # TODO
    UnbookedTrades: Tuple = ()  # TODO
    RoutingCommissions: Tuple = ()  # TODO
    IBGNoteTransactions: Tuple = ()  # TODO
    UnsettledTransfers: Tuple["UnsettledTransfer", ...] = ()
    UnbundledCommissionDetails: Tuple["UnbundledCommissionDetail", ...] = ()
    Adjustments: Tuple = ()  # TODO
    PriorPeriodPositions: Tuple["PriorPeriodPosition", ...] = ()
    CorporateActions: Tuple["CorporateAction", ...] = ()
    ClientFees: Tuple["ClientFee", ...] = ()
    ClientFeesDetail: Tuple["_ClientFeesDetail", ...] = ()
    DebitCardActivities: Tuple["DebitCardActivity", ...] = ()
    SoftDollars: Tuple = ()  # TODO
    CashTransactions: Tuple["CashTransaction", ...] = ()
    SalesTaxes: Tuple["SalesTax", ...] = ()
    CFDCharges: Tuple = ()  # TODO
    InterestAccruals: Tuple["InterestAccrualsCurrency", ...] = ()
    TierInterestDetails: Tuple["TierInterestDetail", ...] = ()
    HardToBorrowDetails: Tuple["HardToBorrowDetail", ...] = ()
    HardToBorrowMarkupDetails: Tuple = ()
    SLBOpenContracts: Tuple = ()  # TODO
    SLBActivities: Tuple["SLBActivity", ...] = ()
    SLBFees: Tuple["SLBFee", ...] = ()
    Transfers: Tuple["Transfer", ...] = ()
    ChangeInDividendAccruals: Tuple["_ChangeInDividendAccrual", ...] = ()
    OpenDividendAccruals: Tuple["OpenDividendAccrual", ...] = ()
    SecuritiesInfo: Tuple["SecurityInfo", ...] = ()
    ConversionRates: Tuple["ConversionRate", ...] = ()
    HKIPOOpenSubscriptions: Tuple = ()  # TODO
    CommissionCredits: Tuple = ()  # TODO
    StockGrantActivities: Tuple = ()  # TODO

    def __repr__(self):
        repr = (
            f"{type(self).__name__}("
            f"accountId={self.accountId!r}, "
            f"fromDate={self.fromDate!r}, "
            f"toDate={self.toDate!r}, "
            f"period={self.period!r}, "
            f"whenGenerated={self.whenGenerated!r}"
        )

        # Report only the nonempty container attributes, by length.
        sequences = (
            (k, getattr(self, k))
            for k, v in self.__annotations__.items()
            if hasattr(v, "__origin__") and v.__origin__ is tuple
        )
        nonempty_sequences = ", ".join(
            f"len({name})={len(value)}" for (name, value) in sequences if value
        )
        if nonempty_sequences:
            repr += ", "
            # NOTE(review): `nonempty_sequences` is already a joined str, so
            # this loop appends it one character at a time - the result is
            # identical to `repr += nonempty_sequences`.
            for seq in nonempty_sequences:
                repr += seq
        repr += ")"
        return repr


@dataclass(frozen=True)
class AccountInformation(FlexElement):
    """ Child of <FlexStatement> """
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    currency: Optional[str] = None
    name: Optional[str] = None
    accountType: Optional[str] = None
    customerType: Optional[str] = None
    accountCapabilities: Tuple[str, ...] = ()
    tradingPermissions: Tuple[str, ...] = ()
    registeredRepName: Optional[str] = None
    registeredRepPhone: Optional[str] = None
    dateOpened: Optional[datetime.date] = None
    dateFunded: Optional[datetime.date] = None
    dateClosed: Optional[datetime.date] = None
    street: Optional[str] = None
    street2: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    country: Optional[str] = None
    postalCode: Optional[str] = None
    streetResidentialAddress: Optional[str] = None
    street2ResidentialAddress: Optional[str] = None
    cityResidentialAddress: Optional[str] = None
    stateResidentialAddress: Optional[str] = None
    countryResidentialAddress: Optional[str] = None
    postalCodeResidentialAddress: Optional[str] = None
    masterName: Optional[str] = None
    ibEntity: Optional[str] = None
    primaryEmail: Optional[str] = None
    accountRepName: Optional[str] = None
    accountRepPhone: Optional[str] = None


# Type alias to work around https://github.com/python/mypy/issues/1775
_AccountInformation = AccountInformation


@dataclass(frozen=True)
class ChangeInNAV(FlexElement):
    """ Child of <FlexStatement> """
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    fromDate: Optional[datetime.date] = None
    toDate: Optional[datetime.date] = None
    startingValue: Optional[decimal.Decimal] = None
    mtm: Optional[decimal.Decimal] = None
    realized: Optional[decimal.Decimal] = None
    changeInUnrealized: Optional[decimal.Decimal] = None
    costAdjustments: Optional[decimal.Decimal] = None
    transferredPnlAdjustments: Optional[decimal.Decimal] = None
    depositsWithdrawals: Optional[decimal.Decimal] = None
    internalCashTransfers: Optional[decimal.Decimal] = None
    assetTransfers: Optional[decimal.Decimal] = None
    debitCardActivity: Optional[decimal.Decimal] = None
    billPay: Optional[decimal.Decimal] = None
    dividends: Optional[decimal.Decimal] = None
    withholdingTax: Optional[decimal.Decimal] = None
    withholding871m: Optional[decimal.Decimal] = None
    withholdingTaxCollected: Optional[decimal.Decimal] = None
    changeInDividendAccruals: Optional[decimal.Decimal] = None
    interest: Optional[decimal.Decimal] = None
    changeInInterestAccruals: Optional[decimal.Decimal] = None
    advisorFees: Optional[decimal.Decimal] = None
    brokerFees: Optional[decimal.Decimal] = None
    changeInBrokerFeeAccruals: Optional[decimal.Decimal] = None
    clientFees: Optional[decimal.Decimal] = None
    otherFees: Optional[decimal.Decimal] = None
    feesReceivables: Optional[decimal.Decimal] = None
    commissions: Optional[decimal.Decimal] = None
    commissionReceivables: Optional[decimal.Decimal] = None
    forexCommissions: Optional[decimal.Decimal] = None
    transactionTax: Optional[decimal.Decimal] = None
    taxReceivables: Optional[decimal.Decimal] = None
    salesTax: Optional[decimal.Decimal] = None
    softDollars: Optional[decimal.Decimal] = None
    netFxTrading: Optional[decimal.Decimal] = None
    fxTranslation: Optional[decimal.Decimal] = None
    linkingAdjustments: Optional[decimal.Decimal] = None
    other: Optional[decimal.Decimal] = None
    endingValue: Optional[decimal.Decimal] = None
    twr: Optional[decimal.Decimal] = None
    corporateActionProceeds: Optional[decimal.Decimal] = None
    commissionCreditsRedemption: Optional[decimal.Decimal] = None
    grantActivity: Optional[decimal.Decimal] = None
    excessFundSweep: Optional[decimal.Decimal] = None
    billableSalesTax: Optional[decimal.Decimal] = None


# Type alias to work around https://github.com/python/mypy/issues/1775
_ChangeInNAV = ChangeInNAV


@dataclass(frozen=True)
class MTMPerformanceSummaryUnderlying(FlexElement):
    """ Wrapped in <MTMPerformanceSummaryInBase> """
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    sedol: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    reportDate: Optional[datetime.date] = None
    prevCloseQuantity: Optional[decimal.Decimal] = None
    prevClosePrice: Optional[decimal.Decimal] = None
    closeQuantity: Optional[decimal.Decimal] = None
    closePrice: Optional[decimal.Decimal] = None
    transactionMtm: Optional[decimal.Decimal] = None
    priorOpenMtm: Optional[decimal.Decimal] = None
    commissions: Optional[decimal.Decimal] = None
    other: Optional[decimal.Decimal] = None
    total: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    corpActionMtm: Optional[decimal.Decimal] = None
    dividends: Optional[decimal.Decimal] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None
    otherWithAccruals: Optional[decimal.Decimal] = None
    totalWithAccruals: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class EquitySummaryByReportDateInBase(FlexElement):
    """ Wrapped in <EquitySummaryInBase> """
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    # NOTE(review): most amounts below come in triples - total plus
    # `*Long`/`*Short` breakdowns; presumably long- and short-position
    # components of the total.  Verify against IB's Flex query reference.
    cash: Optional[decimal.Decimal] = None
    cashLong: Optional[decimal.Decimal] = None
    cashShort: Optional[decimal.Decimal] = None
    slbCashCollateral: Optional[decimal.Decimal] = None
    slbCashCollateralLong: Optional[decimal.Decimal] = None
    slbCashCollateralShort: Optional[decimal.Decimal] = None
    stock: Optional[decimal.Decimal] = None
    stockLong: Optional[decimal.Decimal] = None
    stockShort: Optional[decimal.Decimal] = None
    slbDirectSecuritiesBorrowed: Optional[decimal.Decimal] = None
    slbDirectSecuritiesBorrowedLong: Optional[decimal.Decimal] = None
    slbDirectSecuritiesBorrowedShort: Optional[decimal.Decimal] = None
    slbDirectSecuritiesLent: Optional[decimal.Decimal] = None
    slbDirectSecuritiesLentLong: Optional[decimal.Decimal] = None
    slbDirectSecuritiesLentShort: Optional[decimal.Decimal] = None
    options: Optional[decimal.Decimal] = None
    optionsLong: Optional[decimal.Decimal] = None
    optionsShort: Optional[decimal.Decimal] = None
    bonds: Optional[decimal.Decimal] = None
    bondsLong: Optional[decimal.Decimal] = None
    bondsShort: Optional[decimal.Decimal] = None
    bondInterestAccrualsComponent: Optional[decimal.Decimal] = None
    bondInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
    bondInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
    notes: Optional[decimal.Decimal] = None
    notesLong: Optional[decimal.Decimal] = None
    notesShort: Optional[decimal.Decimal] = None
    interestAccruals: Optional[decimal.Decimal] = None
    interestAccrualsLong: Optional[decimal.Decimal] = None
    interestAccrualsShort: Optional[decimal.Decimal] = None
    softDollars: Optional[decimal.Decimal] = None
    softDollarsLong: Optional[decimal.Decimal] = None
    softDollarsShort: Optional[decimal.Decimal] = None
    dividendAccruals: Optional[decimal.Decimal] = None
    dividendAccrualsLong: Optional[decimal.Decimal] = None
    dividendAccrualsShort: Optional[decimal.Decimal] = None
    total: Optional[decimal.Decimal] = None
    totalLong: Optional[decimal.Decimal] = None
    totalShort: Optional[decimal.Decimal] = None
    commodities: Optional[decimal.Decimal] = None
    commoditiesLong: Optional[decimal.Decimal] = None
    commoditiesShort: Optional[decimal.Decimal] = None
    funds: Optional[decimal.Decimal] = None
    fundsLong: Optional[decimal.Decimal] = None
    fundsShort: Optional[decimal.Decimal] = None
    forexCfdUnrealizedPl: Optional[decimal.Decimal] = None
    forexCfdUnrealizedPlLong: Optional[decimal.Decimal] = None
    forexCfdUnrealizedPlShort: Optional[decimal.Decimal] = None
    brokerInterestAccrualsComponent: Optional[decimal.Decimal] = None
    brokerCashComponent: Optional[decimal.Decimal] = None
    brokerFeesAccrualsComponent: Optional[decimal.Decimal] = None
    brokerFeesAccrualsComponentLong: Optional[decimal.Decimal] = None
    brokerFeesAccrualsComponentShort: Optional[decimal.Decimal] = None
    cfdUnrealizedPl: Optional[decimal.Decimal] = None
    fdicInsuredBankSweepAccount: Optional[decimal.Decimal] = None
    fdicInsuredBankSweepAccountLong: Optional[decimal.Decimal] = None
    fdicInsuredBankSweepAccountShort: Optional[decimal.Decimal] = None
    fdicInsuredBankSweepAccountCashComponent: Optional[decimal.Decimal] = None
    fdicInsuredBankSweepAccountCashComponentLong: Optional[decimal.Decimal] = None
    fdicInsuredBankSweepAccountCashComponentShort: Optional[decimal.Decimal] = None
    fdicInsuredAccountInterestAccruals: Optional[decimal.Decimal] = None
    fdicInsuredAccountInterestAccrualsLong: Optional[decimal.Decimal] = None
    fdicInsuredAccountInterestAccrualsShort: Optional[decimal.Decimal] = None
    fdicInsuredAccountInterestAccrualsComponent: Optional[decimal.Decimal] = None
    fdicInsuredAccountInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
    fdicInsuredAccountInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
    brokerCashComponentLong: Optional[decimal.Decimal] = None
    brokerCashComponentShort: Optional[decimal.Decimal] = None
    brokerInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
    brokerInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
    cfdUnrealizedPlLong: Optional[decimal.Decimal] = None
    cfdUnrealizedPlShort: Optional[decimal.Decimal] = None
    ipoSubscription: Optional[decimal.Decimal] = None
    ipoSubscriptionLong: Optional[decimal.Decimal] = None
    ipoSubscriptionShort: Optional[decimal.Decimal] = None
    physDel: Optional[decimal.Decimal] = None
    physDelLong: Optional[decimal.Decimal] = None
    physDelShort: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class MTDYTDPerformanceSummaryUnderlying(FlexElement):
    """ Wrapped in <MTDYTDPerformanceSummary> """
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    mtmMTD: Optional[decimal.Decimal] = None
    mtmYTD: Optional[decimal.Decimal] = None
    realSTMTD: Optional[decimal.Decimal] = None
    realSTYTD: Optional[decimal.Decimal] = None
    realLTMTD: Optional[decimal.Decimal] = None
    realLTYTD: Optional[decimal.Decimal] = None
    securityIDType: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    realizedPnlMTD: Optional[decimal.Decimal] = None
    realizedCapitalGainsPnlMTD: Optional[decimal.Decimal] = None
    realizedFxPnlMTD: Optional[decimal.Decimal] = None
    realizedPnlYTD: Optional[decimal.Decimal] = None
    realizedCapitalGainsPnlYTD: Optional[decimal.Decimal] = None
    realizedFxPnlYTD: Optional[decimal.Decimal] = None
    brokerFees: Optional[decimal.Decimal] = None
    brokerFeesSec: Optional[decimal.Decimal] = None
    brokerFeesCom: Optional[decimal.Decimal] = None
    brokerFeesMTD: Optional[decimal.Decimal] = None
    brokerFeesYTD: Optional[decimal.Decimal] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None


@dataclass(frozen=True)
class CashReportCurrency(FlexElement):
    """ Wrapped in <CashReport> """
    # NOTE(review): the `*Sec`/`*Com` suffixes presumably break each amount
    # down by securities vs. commodities segment, and `*MTD`/`*YTD` are
    # month-to-date / year-to-date figures - confirm against IB Flex docs.
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fromDate: Optional[datetime.date] = None
    toDate: Optional[datetime.date] = None
    startingCash: Optional[decimal.Decimal] = None
    startingCashSec: Optional[decimal.Decimal] = None
    startingCashCom: Optional[decimal.Decimal] = None
    clientFees: Optional[decimal.Decimal] = None
    clientFeesSec: Optional[decimal.Decimal] = None
    clientFeesCom: Optional[decimal.Decimal] = None
    commissions: Optional[decimal.Decimal] = None
    commissionsSec: Optional[decimal.Decimal] = None
    commissionsCom: Optional[decimal.Decimal] = None
    billableCommissions: Optional[decimal.Decimal] = None
    billableCommissionsSec: Optional[decimal.Decimal] = None
    billableCommissionsCom: Optional[decimal.Decimal] = None
    depositWithdrawals: Optional[decimal.Decimal] = None
    depositWithdrawalsSec: Optional[decimal.Decimal] = None
    depositWithdrawalsCom: Optional[decimal.Decimal] = None
    deposits: Optional[decimal.Decimal] = None
    depositsSec: Optional[decimal.Decimal] = None
    depositsCom: Optional[decimal.Decimal] = None
    withdrawals: Optional[decimal.Decimal] = None
    withdrawalsSec: Optional[decimal.Decimal] = None
    withdrawalsCom: Optional[decimal.Decimal] = None
    accountTransfers: Optional[decimal.Decimal] = None
    accountTransfersSec: Optional[decimal.Decimal] = None
    accountTransfersCom: Optional[decimal.Decimal] = None
    internalTransfers: Optional[decimal.Decimal] = None
    internalTransfersSec: Optional[decimal.Decimal] = None
    internalTransfersCom: Optional[decimal.Decimal] = None
    dividends: Optional[decimal.Decimal] = None
    dividendsSec: Optional[decimal.Decimal] = None
    dividendsCom: Optional[decimal.Decimal] = None
    brokerFees: Optional[decimal.Decimal] = None
    brokerFeesSec: Optional[decimal.Decimal] = None
    brokerFeesCom: Optional[decimal.Decimal] = None
    brokerFeesMTD: Optional[decimal.Decimal] = None
    brokerFeesYTD: Optional[decimal.Decimal] = None
    brokerInterest: Optional[decimal.Decimal] = None
    brokerInterestSec: Optional[decimal.Decimal] = None
    brokerInterestCom: Optional[decimal.Decimal] = None
    bondInterest: Optional[decimal.Decimal] = None
    bondInterestSec: Optional[decimal.Decimal] = None
    bondInterestCom: Optional[decimal.Decimal] = None
    cashSettlingMtm: Optional[decimal.Decimal] = None
    cashSettlingMtmSec: Optional[decimal.Decimal] = None
    cashSettlingMtmCom: Optional[decimal.Decimal] = None
    cfdCharges: Optional[decimal.Decimal] = None
    cfdChargesSec: Optional[decimal.Decimal] = None
    cfdChargesCom: Optional[decimal.Decimal] = None
    netTradesSales: Optional[decimal.Decimal] = None
    netTradesSalesSec: Optional[decimal.Decimal] = None
    netTradesSalesCom: Optional[decimal.Decimal] = None
    netTradesPurchases: Optional[decimal.Decimal] = None
    netTradesPurchasesSec: Optional[decimal.Decimal] = None
    netTradesPurchasesCom: Optional[decimal.Decimal] = None
    feesReceivables: Optional[decimal.Decimal] = None
    feesReceivablesSec: Optional[decimal.Decimal] = None
    feesReceivablesCom: Optional[decimal.Decimal] = None
    paymentInLieu: Optional[decimal.Decimal] = None
    paymentInLieuSec: Optional[decimal.Decimal] = None
    paymentInLieuCom: Optional[decimal.Decimal] = None
    transactionTax: Optional[decimal.Decimal] = None
    transactionTaxSec: Optional[decimal.Decimal] = None
    transactionTaxCom: Optional[decimal.Decimal] = None
    withholdingTax: Optional[decimal.Decimal] = None
    withholdingTaxSec: Optional[decimal.Decimal] = None
    withholdingTaxCom: Optional[decimal.Decimal] = None
    fxTranslationGainLoss: Optional[decimal.Decimal] = None
    fxTranslationGainLossSec: Optional[decimal.Decimal] = None
    fxTranslationGainLossCom: Optional[decimal.Decimal] = None
    otherFees: Optional[decimal.Decimal] = None
    otherFeesSec: Optional[decimal.Decimal] = None
    otherFeesCom: Optional[decimal.Decimal] = None
    endingCash: Optional[decimal.Decimal] = None
    endingCashSec: Optional[decimal.Decimal] = None
    endingCashCom: Optional[decimal.Decimal] = None
    endingSettledCash: Optional[decimal.Decimal] = None
    endingSettledCashSec: Optional[decimal.Decimal] = None
    endingSettledCashCom: Optional[decimal.Decimal] = None
    clientFeesMTD: Optional[decimal.Decimal] = None
    clientFeesYTD: Optional[decimal.Decimal] = None
    commissionsMTD: Optional[decimal.Decimal] = None
    commissionsYTD: Optional[decimal.Decimal] = None
    billableCommissionsMTD: Optional[decimal.Decimal] = None
    billableCommissionsYTD: Optional[decimal.Decimal] = None
    depositWithdrawalsMTD: Optional[decimal.Decimal] = None
    depositWithdrawalsYTD: Optional[decimal.Decimal] = None
    depositsMTD: Optional[decimal.Decimal] = None
    depositsYTD: Optional[decimal.Decimal] = None
    withdrawalsMTD: Optional[decimal.Decimal] = None
    withdrawalsYTD: Optional[decimal.Decimal] = None
    accountTransfersMTD: Optional[decimal.Decimal] = None
    accountTransfersYTD: Optional[decimal.Decimal] = None
    internalTransfersMTD: Optional[decimal.Decimal] = None
    internalTransfersYTD: Optional[decimal.Decimal] = None
    excessFundSweep: Optional[decimal.Decimal] = None
    excessFundSweepSec: Optional[decimal.Decimal] = None
    excessFundSweepCom: Optional[decimal.Decimal] = None
    excessFundSweepMTD: Optional[decimal.Decimal] = None
    excessFundSweepYTD: Optional[decimal.Decimal] = None
    dividendsMTD: Optional[decimal.Decimal] = None
    dividendsYTD: Optional[decimal.Decimal] = None
    insuredDepositInterestMTD: Optional[decimal.Decimal] = None
    insuredDepositInterestYTD: Optional[decimal.Decimal] = None
    brokerInterestMTD: Optional[decimal.Decimal] = None
    brokerInterestYTD: Optional[decimal.Decimal] = None
    bondInterestMTD: Optional[decimal.Decimal] = None
    bondInterestYTD: Optional[decimal.Decimal] = None
    cashSettlingMtmMTD: Optional[decimal.Decimal] = None
    cashSettlingMtmYTD: Optional[decimal.Decimal] = None
    realizedVmMTD: Optional[decimal.Decimal] = None
    realizedVmYTD: Optional[decimal.Decimal] = None
    cfdChargesMTD: Optional[decimal.Decimal] = None
    cfdChargesYTD: Optional[decimal.Decimal] = None
    netTradesSalesMTD: Optional[decimal.Decimal] = None
    netTradesSalesYTD: Optional[decimal.Decimal] = None
    advisorFeesMTD: Optional[decimal.Decimal] = None
    advisorFeesYTD: Optional[decimal.Decimal] = None
    feesReceivablesMTD: Optional[decimal.Decimal] = None
    feesReceivablesYTD: Optional[decimal.Decimal] = None
    netTradesPurchasesMTD: Optional[decimal.Decimal] = None
    netTradesPurchasesYTD: Optional[decimal.Decimal] = None
    paymentInLieuMTD: Optional[decimal.Decimal] = None
    paymentInLieuYTD: Optional[decimal.Decimal] = None
    transactionTaxMTD: Optional[decimal.Decimal] = None
    transactionTaxYTD: Optional[decimal.Decimal] = None
    taxReceivablesMTD: Optional[decimal.Decimal] = None
    taxReceivablesYTD: Optional[decimal.Decimal] = None
    withholdingTaxMTD: Optional[decimal.Decimal] = None
    withholdingTaxYTD: Optional[decimal.Decimal] = None
    withholding871mMTD: Optional[decimal.Decimal] = None
    withholding871mYTD: Optional[decimal.Decimal] = None
    withholdingCollectedTaxMTD: Optional[decimal.Decimal] = None
    withholdingCollectedTaxYTD: Optional[decimal.Decimal] = None
    salesTaxMTD: Optional[decimal.Decimal] = None
    salesTaxYTD: Optional[decimal.Decimal] = None
    otherFeesMTD: Optional[decimal.Decimal] = None
    otherFeesYTD: Optional[decimal.Decimal] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    avgCreditBalance: Optional[decimal.Decimal] = None
    avgCreditBalanceSec: Optional[decimal.Decimal] = None
    avgCreditBalanceCom: Optional[decimal.Decimal] = None
    avgDebitBalance: Optional[decimal.Decimal] = None
    avgDebitBalanceSec: Optional[decimal.Decimal] = None
    avgDebitBalanceCom: Optional[decimal.Decimal] = None
    linkingAdjustments: Optional[decimal.Decimal] = None
    linkingAdjustmentsSec: Optional[decimal.Decimal] = None
    linkingAdjustmentsCom: Optional[decimal.Decimal] = None
    insuredDepositInterest: Optional[decimal.Decimal] = None
    insuredDepositInterestSec: Optional[decimal.Decimal] = None
    insuredDepositInterestCom: Optional[decimal.Decimal] = None
    realizedVm: Optional[decimal.Decimal] = None
    realizedVmSec: Optional[decimal.Decimal] = None
    realizedVmCom: Optional[decimal.Decimal] = None
    advisorFees: Optional[decimal.Decimal] = None
    advisorFeesSec: Optional[decimal.Decimal] = None
    advisorFeesCom: Optional[decimal.Decimal] = None
    taxReceivables: Optional[decimal.Decimal] = None
    taxReceivablesSec: Optional[decimal.Decimal] = None
    taxReceivablesCom: Optional[decimal.Decimal] = None
    withholding871m: Optional[decimal.Decimal] = None
    withholding871mSec: Optional[decimal.Decimal] = None
    withholding871mCom: Optional[decimal.Decimal] = None
    withholdingCollectedTax: Optional[decimal.Decimal] = None
    withholdingCollectedTaxSec: Optional[decimal.Decimal] = None
    withholdingCollectedTaxCom: Optional[decimal.Decimal] = None
    salesTax: Optional[decimal.Decimal] = None
    salesTaxSec: Optional[decimal.Decimal] = None
    salesTaxCom: Optional[decimal.Decimal] = None
    other: Optional[decimal.Decimal] = None
    otherSec: Optional[decimal.Decimal] = None
    otherCom: Optional[decimal.Decimal] = None
    levelOfDetail: Optional[str] = None
    debitCardActivity: Optional[decimal.Decimal] = None
    debitCardActivitySec: Optional[decimal.Decimal] = None
    debitCardActivityCom: Optional[decimal.Decimal] = None
    debitCardActivityMTD: Optional[decimal.Decimal] = None
    debitCardActivityYTD: Optional[decimal.Decimal] = None
    billPay: Optional[decimal.Decimal] = None
    billPaySec: Optional[decimal.Decimal] = None
    billPayCom: Optional[decimal.Decimal] = None
    billPayMTD: Optional[decimal.Decimal] = None
    billPayYTD: Optional[decimal.Decimal] = None
    realizedForexVm: Optional[decimal.Decimal] = None
    realizedForexVmSec: Optional[decimal.Decimal] = None
    realizedForexVmCom: Optional[decimal.Decimal] = None
    realizedForexVmMTD: Optional[decimal.Decimal] = None
    realizedForexVmYTD: Optional[decimal.Decimal] = None
    ipoSubscription: Optional[decimal.Decimal] = None
    ipoSubscriptionSec: Optional[decimal.Decimal] = None
    ipoSubscriptionCom: Optional[decimal.Decimal] = None
    ipoSubscriptionMTD: Optional[decimal.Decimal] = None
    ipoSubscriptionYTD: Optional[decimal.Decimal] = None
    billableSalesTax: Optional[decimal.Decimal] = None
    billableSalesTaxSec: Optional[decimal.Decimal] = None
    billableSalesTaxCom: Optional[decimal.Decimal] = None
    billableSalesTaxMTD: Optional[decimal.Decimal] = None
    billableSalesTaxYTD: Optional[decimal.Decimal] = None
    commissionCreditsRedemption: Optional[decimal.Decimal] = None
    commissionCreditsRedemptionSec: Optional[decimal.Decimal] = None
    commissionCreditsRedemptionCom: Optional[decimal.Decimal] = None
    commissionCreditsRedemptionMTD: Optional[decimal.Decimal] = None
    commissionCreditsRedemptionYTD: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class StatementOfFundsLine(FlexElement):
    """ Wrapped in <StmtFunds> """
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    balance: Optional[decimal.Decimal] = None
    debit: Optional[decimal.Decimal] = None
    credit: Optional[decimal.Decimal] = None
    currency: Optional[str] = None
    tradeID: Optional[str] = None
    # Despite the name, `date` actually contains date/time data.
    date: Optional[datetime.datetime] = None
    reportDate: Optional[datetime.date] = None
    activityDescription: Optional[str] = None
    amount: Optional[decimal.Decimal] = None
    buySell: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    listingExchange: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    settleDate: Optional[datetime.date] = None
    activityCode: Optional[str] = None  # FIXME
    orderID: Optional[str] = None
    tradeQuantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    tradeGross: Optional[decimal.Decimal] = None
    tradeCommission: Optional[decimal.Decimal] = None
    tradeTax: Optional[decimal.Decimal] = None
    tradeCode: Optional[str] = None
    levelOfDetail: Optional[str] = None
    transactionID: Optional[str] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None


@dataclass(frozen=True)
class ChangeInPositionValue(FlexElement):
    """ Wrapped in <ChangeInPositionValues> """
    assetCategory: Optional[enums.AssetClass] = None
    currency: Optional[str] = None
    priorPeriodValue: Optional[decimal.Decimal] = None
    transactions: Optional[decimal.Decimal] = None
    mtmPriorPeriodPositions: Optional[decimal.Decimal] = None
    mtmTransactions: Optional[decimal.Decimal] = None
    corporateActions: Optional[decimal.Decimal] = None
    accountTransfers: Optional[decimal.Decimal] = None
    fxTranslationPnl: Optional[decimal.Decimal] = None
    futurePriceAdjustments: Optional[decimal.Decimal] = None
    settledCash: Optional[decimal.Decimal] = None
    endOfPeriodValue: Optional[decimal.Decimal] = None
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    other: Optional[decimal.Decimal] = None
    linkingAdjustments: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class OpenPosition(FlexElement):
    """ Wrapped in <OpenPositions> """
    side: Optional[enums.LongShort] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    reportDate: Optional[datetime.date] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    position: Optional[decimal.Decimal] = None
    markPrice: Optional[decimal.Decimal] = None
    positionValue: Optional[decimal.Decimal] = None
    openPrice: Optional[decimal.Decimal] = None
    costBasisPrice: Optional[decimal.Decimal] = None
    costBasisMoney: Optional[decimal.Decimal] = None
    fifoPnlUnrealized: Optional[decimal.Decimal] = None
    levelOfDetail: Optional[str] = None
    openDateTime: Optional[datetime.datetime] = None
    holdingPeriodDateTime: Optional[datetime.datetime] = None
    securityIDType: Optional[str] = None
    issuer: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    code: Tuple[enums.Code, ...] = ()
    originatingOrderID: Optional[str] = None
    originatingTransactionID: Optional[str] = None
    accruedInt: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    sedol: Optional[str] = None
    percentOfNAV: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    listingExchange: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    positionValueInBase: Optional[decimal.Decimal] = None
    unrealizedCapitalGainsPnl: Optional[decimal.Decimal] = None
    # NOTE(review): `unrealizedlFxPnl` (stray "l") matches the attribute name
    # as emitted in the source data - do not "fix" the spelling.
    unrealizedlFxPnl: Optional[decimal.Decimal] = None
    vestingDate: Optional[datetime.date] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None


@dataclass(frozen=True)
class FxLot(FlexElement):
    """ Wrapped in <FxLots>, which in turn is wrapped in <FxPositions> """
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    functionalCurrency: Optional[str] = None
    fxCurrency: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    costPrice: Optional[decimal.Decimal] = None
    costBasis: Optional[decimal.Decimal] = None
    closePrice: Optional[decimal.Decimal] = None
    value: Optional[decimal.Decimal] = None
    unrealizedPL: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    lotDescription: Optional[str] = None
    lotOpenDateTime: Optional[datetime.datetime] = None
    levelOfDetail: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None


@dataclass(frozen=True)
class Trade(FlexElement):
    """ Wrapped in <Trades> """
    transactionType: Optional[enums.TradeType] = None
    openCloseIndicator: Optional[enums.OpenClose] = None
    buySell: Optional[enums.BuySell] = None
    orderType: Optional[enums.OrderType] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    tradeID: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    tradeDate: Optional[datetime.date] = None
    tradeTime: Optional[datetime.time] = None
    settleDateTarget: Optional[datetime.date] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    tradeMoney: Optional[decimal.Decimal] = None
    taxes: Optional[decimal.Decimal] = None
    ibCommission: Optional[decimal.Decimal] = None
    ibCommissionCurrency: Optional[str] = None
    netCash: Optional[decimal.Decimal] = None
    netCashInBase: Optional[decimal.Decimal] = None
    closePrice: Optional[decimal.Decimal] = None
    notes: Tuple[enums.Code, ...] = ()  # separator = ";"
    cost: Optional[decimal.Decimal] = None
    mtmPnl: Optional[decimal.Decimal] = None
    origTradePrice: Optional[decimal.Decimal] = None
    origTradeDate: Optional[datetime.date] = None
    origTradeID: Optional[str] = None
    origOrderID: Optional[str] = None
    openDateTime: Optional[datetime.datetime] = None
    fifoPnlRealized: Optional[decimal.Decimal] = None
    capitalGainsPnl: Optional[decimal.Decimal] = None
    levelOfDetail: Optional[str] = None
    ibOrderID: Optional[str] = None
    # Despite the name, `orderTime` actually contains date/time data.
    orderTime: Optional[datetime.datetime] = None
    changeInPrice: Optional[decimal.Decimal] = None
    changeInQuantity: Optional[decimal.Decimal] = None
    proceeds: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    clearingFirmID: Optional[str] = None
    # Effective 2013, every Trade has a `transactionID` attribute that can't
    # be deselected in the Flex query template.
    transactionID: Optional[str] = None
    holdingPeriodDateTime: Optional[datetime.datetime] = None
    ibExecID: Optional[str] = None
    brokerageOrderID: Optional[str] = None
    orderReference: Optional[str] = None
    volatilityOrderLink: Optional[str] = None
    exchOrderId: Optional[str] = None
    extExecID: Optional[str] = None
    traderID: Optional[str] = None
    isAPIOrder: Optional[bool] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    dateTime: Optional[datetime.datetime] = None
    underlyingConid: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    sedol: Optional[str] = None
    whenRealized: Optional[datetime.datetime] = None
    whenReopened: Optional[datetime.datetime] = None
    accruedInt: Optional[decimal.Decimal] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None


@dataclass(frozen=True)
class Lot(FlexElement):
    """ Wrapped in <Trades> """
    transactionType: Optional[enums.TradeType] = None
    openCloseIndicator: Optional[enums.OpenClose] = None
    buySell: Optional[enums.BuySell] = None
    orderType: Optional[enums.OrderType] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    tradeID: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    tradeDate: Optional[datetime.date] = None
    tradeTime: Optional[datetime.time] = None
    settleDateTarget: Optional[datetime.date] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    tradeMoney: Optional[decimal.Decimal] = None
    taxes: Optional[decimal.Decimal] = None
    ibCommission: Optional[decimal.Decimal] = None
    ibCommissionCurrency: Optional[str] = None
    netCash: Optional[decimal.Decimal] = None
    netCashInBase: Optional[decimal.Decimal] = None
    closePrice: Optional[decimal.Decimal] = None
    notes: Tuple[enums.Code, ...]
= () # separator = ";" cost: Optional[decimal.Decimal] = None mtmPnl: Optional[decimal.Decimal] = None origTradePrice: Optional[decimal.Decimal] = None origTradeDate: Optional[datetime.date] = None origTradeID: Optional[str] = None origOrderID: Optional[str] = None openDateTime: Optional[datetime.datetime] = None fifoPnlRealized: Optional[decimal.Decimal] = None capitalGainsPnl: Optional[decimal.Decimal] = None levelOfDetail: Optional[str] = None ibOrderID: Optional[str] = None # Despite the name, `orderTime` actually contains date/time data. orderTime: Optional[datetime.datetime] = None changeInPrice: Optional[decimal.Decimal] = None changeInQuantity: Optional[decimal.Decimal] = None proceeds: Optional[decimal.Decimal] = None fxPnl: Optional[decimal.Decimal] = None clearingFirmID: Optional[str] = None # Effective 2013, every Trade has a `transactionID` attribute that can't # be deselected in the Flex query template. transactionID: Optional[str] = None holdingPeriodDateTime: Optional[datetime.datetime] = None ibExecID: Optional[str] = None brokerageOrderID: Optional[str] = None orderReference: Optional[str] = None volatilityOrderLink: Optional[str] = None exchOrderId: Optional[str] = None extExecID: Optional[str] = None traderID: Optional[str] = None isAPIOrder: Optional[bool] = None acctAlias: Optional[str] = None model: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None principalAdjustFactor: Optional[decimal.Decimal] = None dateTime: Optional[datetime.datetime] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None sedol: Optional[str] = None whenRealized: Optional[datetime.datetime] = None whenReopened: Optional[datetime.datetime] = None @dataclass(frozen=True) class UnbundledCommissionDetail(FlexElement): """ Wrapped in <UnbundledCommissionDetails> """ buySell: 
Optional[enums.BuySell] = None assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None sedol: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None dateTime: Optional[datetime.datetime] = None exchange: Optional[str] = None quantity: Optional[decimal.Decimal] = None price: Optional[decimal.Decimal] = None tradeID: Optional[str] = None orderReference: Optional[str] = None totalCommission: Optional[decimal.Decimal] = None brokerExecutionCharge: Optional[decimal.Decimal] = None brokerClearingCharge: Optional[decimal.Decimal] = None thirdPartyExecutionCharge: Optional[decimal.Decimal] = None thirdPartyClearingCharge: Optional[decimal.Decimal] = None thirdPartyRegulatoryCharge: Optional[decimal.Decimal] = None regFINRATradingActivityFee: Optional[decimal.Decimal] = None regSection31TransactionFee: Optional[decimal.Decimal] = None regOther: Optional[decimal.Decimal] = None other: Optional[decimal.Decimal] = None @dataclass(frozen=True) class SymbolSummary(FlexElement): """ Wrapped in <TradeConfirms> """ accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None assetCategory: Optional[enums.AssetClass] = None symbol: 
Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None transactionType: Optional[enums.TradeType] = None tradeID: Optional[str] = None orderID: Optional[decimal.Decimal] = None execID: Optional[str] = None brokerageOrderID: Optional[str] = None orderReference: Optional[str] = None volatilityOrderLink: Optional[str] = None clearingFirmID: Optional[str] = None origTradePrice: Optional[decimal.Decimal] = None origTradeDate: Optional[datetime.date] = None origTradeID: Optional[str] = None # Despite the name, `orderTime` actually contains date/time data. 
orderTime: Optional[datetime.datetime] = None dateTime: Optional[datetime.datetime] = None reportDate: Optional[datetime.date] = None settleDate: Optional[datetime.date] = None tradeDate: Optional[datetime.date] = None exchange: Optional[str] = None buySell: Optional[enums.BuySell] = None quantity: Optional[decimal.Decimal] = None price: Optional[decimal.Decimal] = None amount: Optional[decimal.Decimal] = None proceeds: Optional[decimal.Decimal] = None commission: Optional[decimal.Decimal] = None brokerExecutionCommission: Optional[decimal.Decimal] = None brokerClearingCommission: Optional[decimal.Decimal] = None thirdPartyExecutionCommission: Optional[decimal.Decimal] = None thirdPartyClearingCommission: Optional[decimal.Decimal] = None thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None otherCommission: Optional[decimal.Decimal] = None commissionCurrency: Optional[str] = None tax: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] = () orderType: Optional[enums.OrderType] = None levelOfDetail: Optional[str] = None traderID: Optional[str] = None isAPIOrder: Optional[bool] = None allocatedTo: Optional[str] = None accruedInt: Optional[decimal.Decimal] = None @dataclass(frozen=True) class Order(FlexElement): """ Wrapped in <TradeConfirms> or <Trades>""" accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = 
None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None transactionType: Optional[enums.TradeType] = None tradeID: Optional[str] = None orderID: Optional[decimal.Decimal] = None execID: Optional[str] = None brokerageOrderID: Optional[str] = None orderReference: Optional[str] = None volatilityOrderLink: Optional[str] = None clearingFirmID: Optional[str] = None origTradePrice: Optional[decimal.Decimal] = None origTradeDate: Optional[datetime.date] = None origTradeID: Optional[str] = None # Despite the name, `orderTime` actually contains date/time data. orderTime: Optional[datetime.datetime] = None dateTime: Optional[datetime.datetime] = None reportDate: Optional[datetime.date] = None settleDate: Optional[datetime.date] = None tradeDate: Optional[datetime.date] = None exchange: Optional[str] = None buySell: Optional[enums.BuySell] = None quantity: Optional[decimal.Decimal] = None price: Optional[decimal.Decimal] = None amount: Optional[decimal.Decimal] = None proceeds: Optional[decimal.Decimal] = None commission: Optional[decimal.Decimal] = None brokerExecutionCommission: Optional[decimal.Decimal] = None brokerClearingCommission: Optional[decimal.Decimal] = None thirdPartyExecutionCommission: Optional[decimal.Decimal] = None thirdPartyClearingCommission: Optional[decimal.Decimal] = None thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None otherCommission: Optional[decimal.Decimal] = None commissionCurrency: Optional[str] = None tax: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] 
= () orderType: Optional[enums.OrderType] = None levelOfDetail: Optional[str] = None traderID: Optional[str] = None isAPIOrder: Optional[bool] = None allocatedTo: Optional[str] = None accruedInt: Optional[decimal.Decimal] = None netCash: Optional[decimal.Decimal] = None tradePrice: Optional[decimal.Decimal] = None ibCommission: Optional[decimal.Decimal] = None ibOrderID: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None settleDateTarget: Optional[datetime.date] = None tradeMoney: Optional[decimal.Decimal] = None taxes: Optional[decimal.Decimal] = None ibCommissionCurrency: Optional[str] = None closePrice: Optional[decimal.Decimal] = None openCloseIndicator: Optional[enums.OpenClose] = None notes: Optional[str] = None cost: Optional[decimal.Decimal] = None fifoPnlRealized: Optional[decimal.Decimal] = None fxPnl: Optional[decimal.Decimal] = None mtmPnl: Optional[decimal.Decimal] = None origOrderID: Optional[str] = None transactionID: Optional[str] = None ibExecID: Optional[str] = None exchOrderId: Optional[str] = None extExecID: Optional[str] = None openDateTime: Optional[datetime.datetime] = None holdingPeriodDateTime: Optional[datetime.datetime] = None whenRealized: Optional[datetime.datetime] = None whenReopened: Optional[datetime.datetime] = None changeInPrice: Optional[decimal.Decimal] = None changeInQuantity: Optional[decimal.Decimal] = None @dataclass(frozen=True) class TradeConfirm(FlexElement): """ Wrapped in <TradeConfirms> """ transactionType: Optional[enums.TradeType] = None openCloseIndicator: Optional[enums.OpenClose] = None buySell: Optional[enums.BuySell] = None orderType: Optional[enums.OrderType] = None assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: 
Optional[str] = None isin: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None tradeID: Optional[str] = None reportDate: Optional[datetime.date] = None tradeDate: Optional[datetime.date] = None tradeTime: Optional[datetime.time] = None settleDateTarget: Optional[datetime.date] = None exchange: Optional[str] = None quantity: Optional[decimal.Decimal] = None tradePrice: Optional[decimal.Decimal] = None tradeMoney: Optional[decimal.Decimal] = None proceeds: Optional[decimal.Decimal] = None taxes: Optional[decimal.Decimal] = None ibCommission: Optional[decimal.Decimal] = None ibCommissionCurrency: Optional[str] = None netCash: Optional[decimal.Decimal] = None closePrice: Optional[decimal.Decimal] = None notes: Tuple[enums.Code, ...] 
= () # separator = ";" cost: Optional[decimal.Decimal] = None fifoPnlRealized: Optional[decimal.Decimal] = None fxPnl: Optional[decimal.Decimal] = None mtmPnl: Optional[decimal.Decimal] = None origTradePrice: Optional[decimal.Decimal] = None origTradeDate: Optional[datetime.date] = None origTradeID: Optional[str] = None origOrderID: Optional[str] = None clearingFirmID: Optional[str] = None transactionID: Optional[str] = None openDateTime: Optional[datetime.datetime] = None holdingPeriodDateTime: Optional[datetime.datetime] = None whenRealized: Optional[datetime.datetime] = None whenReopened: Optional[datetime.datetime] = None levelOfDetail: Optional[str] = None commissionCurrency: Optional[str] = None price: Optional[decimal.Decimal] = None thirdPartyClearingCommission: Optional[decimal.Decimal] = None orderID: Optional[decimal.Decimal] = None allocatedTo: Optional[str] = None thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None dateTime: Optional[datetime.datetime] = None brokerExecutionCommission: Optional[decimal.Decimal] = None thirdPartyExecutionCommission: Optional[decimal.Decimal] = None amount: Optional[decimal.Decimal] = None otherCommission: Optional[decimal.Decimal] = None commission: Optional[decimal.Decimal] = None brokerClearingCommission: Optional[decimal.Decimal] = None ibOrderID: Optional[str] = None ibExecID: Optional[str] = None execID: Optional[str] = None brokerageOrderID: Optional[str] = None orderReference: Optional[str] = None volatilityOrderLink: Optional[str] = None exchOrderId: Optional[str] = None extExecID: Optional[str] = None # Despite the name, `orderTime` actually contains date/time data. orderTime: Optional[datetime.datetime] = None changeInPrice: Optional[decimal.Decimal] = None changeInQuantity: Optional[decimal.Decimal] = None traderID: Optional[str] = None isAPIOrder: Optional[bool] = None code: Tuple[enums.Code, ...] 
= () tax: Optional[decimal.Decimal] = None listingExchange: Optional[str] = None underlyingListingExchange: Optional[str] = None settleDate: Optional[datetime.date] = None underlyingSecurityID: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None accruedInt: Optional[decimal.Decimal] = None @dataclass(frozen=True) class OptionEAE(FlexElement): """Option Exercise Assignment or Expiration Wrapped in (identically-named) <OptionEAE> """ transactionType: Optional[enums.OptionAction] = None assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None date: Optional[datetime.date] = None quantity: Optional[decimal.Decimal] = None tradePrice: Optional[decimal.Decimal] = None markPrice: Optional[decimal.Decimal] = None proceeds: Optional[decimal.Decimal] = None commisionsAndTax: Optional[decimal.Decimal] = None costBasis: Optional[decimal.Decimal] = None realizedPnl: Optional[decimal.Decimal] = None fxPnl: Optional[decimal.Decimal] = None mtmPnl: Optional[decimal.Decimal] = None tradeID: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None # Type alias to work around https://github.com/python/mypy/issues/1775 _OptionEAE = OptionEAE @dataclass(frozen=True) class TradeTransfer(FlexElement): """ Wrapped in <TradeTransfers> """ transactionType: Optional[enums.TradeType] = None 
openCloseIndicator: Optional[enums.OpenClose] = None direction: Optional[enums.ToFrom] = None deliveredReceived: Optional[enums.DeliveredReceived] = None assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None underlyingConid: Optional[str] = None tradeID: Optional[str] = None reportDate: Optional[datetime.date] = None tradeDate: Optional[datetime.date] = None tradeTime: Optional[datetime.time] = None settleDateTarget: Optional[datetime.date] = None exchange: Optional[str] = None quantity: Optional[decimal.Decimal] = None tradePrice: Optional[decimal.Decimal] = None tradeMoney: Optional[decimal.Decimal] = None taxes: Optional[decimal.Decimal] = None ibCommission: Optional[decimal.Decimal] = None ibCommissionCurrency: Optional[str] = None closePrice: Optional[decimal.Decimal] = None notes: Tuple[enums.Code, ...] 
= () # separator = ";" cost: Optional[decimal.Decimal] = None fifoPnlRealized: Optional[decimal.Decimal] = None mtmPnl: Optional[decimal.Decimal] = None brokerName: Optional[str] = None brokerAccount: Optional[str] = None awayBrokerCommission: Optional[decimal.Decimal] = None regulatoryFee: Optional[decimal.Decimal] = None netTradeMoney: Optional[decimal.Decimal] = None netTradeMoneyInBase: Optional[decimal.Decimal] = None netTradePrice: Optional[decimal.Decimal] = None multiplier: Optional[decimal.Decimal] = None acctAlias: Optional[str] = None model: Optional[str] = None sedol: Optional[str] = None securityID: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None proceeds: Optional[decimal.Decimal] = None fxPnl: Optional[decimal.Decimal] = None netCash: Optional[decimal.Decimal] = None origTradePrice: Optional[decimal.Decimal] = None # Oddly, `origTradeDate` appears to have hard-coded YYYYMMDD format # instead of the date format from the report configuration. 
origTradeDate: Optional[datetime.date] = None origTradeID: Optional[str] = None origOrderID: Optional[str] = None clearingFirmID: Optional[str] = None transactionID: Optional[str] = None openDateTime: Optional[datetime.datetime] = None holdingPeriodDateTime: Optional[datetime.datetime] = None whenRealized: Optional[datetime.datetime] = None whenReopened: Optional[datetime.datetime] = None levelOfDetail: Optional[str] = None securityIDType: Optional[str] = None @dataclass(frozen=True) class InterestAccrualsCurrency(FlexElement): """ Wrapped in <InterestAccruals> """ accountId: Optional[str] = None currency: Optional[str] = None fromDate: Optional[datetime.date] = None toDate: Optional[datetime.date] = None startingAccrualBalance: Optional[decimal.Decimal] = None interestAccrued: Optional[decimal.Decimal] = None accrualReversal: Optional[decimal.Decimal] = None endingAccrualBalance: Optional[decimal.Decimal] = None acctAlias: Optional[str] = None model: Optional[str] = None fxTranslation: Optional[decimal.Decimal] = None @dataclass(frozen=True) class TierInterestDetail(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None interestType: Optional[str] = None valueDate: Optional[datetime.date] = None tierBreak: Optional[str] = None balanceThreshold: Optional[decimal.Decimal] = None securitiesPrincipal: Optional[decimal.Decimal] = None commoditiesPrincipal: Optional[decimal.Decimal] = None ibuklPrincipal: Optional[decimal.Decimal] = None totalPrincipal: Optional[decimal.Decimal] = None rate: Optional[decimal.Decimal] = None securitiesInterest: Optional[decimal.Decimal] = None commoditiesInterest: Optional[decimal.Decimal] = None ibuklInterest: Optional[decimal.Decimal] = None totalInterest: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] 
= () fromAcct: Optional[str] = None toAcct: Optional[str] = None @dataclass(frozen=True) class HardToBorrowDetail(FlexElement): """ Wrapped in <HardToBorrowDetails> """ assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None valueDate: Optional[datetime.date] = None quantity: Optional[decimal.Decimal] = None price: Optional[decimal.Decimal] = None value: Optional[decimal.Decimal] = None borrowFeeRate: Optional[decimal.Decimal] = None borrowFee: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] 
= () fromAcct: Optional[str] = None toAcct: Optional[str] = None @dataclass(frozen=True) class SLBActivity(FlexElement): """ Wrapped in <SLBActivities> """ assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None date: Optional[datetime.date] = None slbTransactionId: Optional[str] = None activityDescription: Optional[str] = None type: Optional[str] = None exchange: Optional[str] = None quantity: Optional[decimal.Decimal] = None feeRate: Optional[decimal.Decimal] = None collateralAmount: Optional[decimal.Decimal] = None markQuantity: Optional[decimal.Decimal] = None markPriorPrice: Optional[decimal.Decimal] = None markCurrentPrice: Optional[decimal.Decimal] = None @dataclass(frozen=True) class SLBFee: """ Wrapped in <SLBFees> """ accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[str] = None assetCategory: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingSecurityID: 
Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None valueDate: Optional[datetime.date] = None startDate: Optional[datetime.date] = None type: Optional[str] = None # FIXME exchange: Optional[str] = None quantity: Optional[decimal.Decimal] = None collateralAmount: Optional[decimal.Decimal] = None feeRate: Optional[decimal.Decimal] = None fee: Optional[decimal.Decimal] = None carryCharge: Optional[decimal.Decimal] = None ticketCharge: Optional[decimal.Decimal] = None totalCharges: Optional[decimal.Decimal] = None marketFeeRate: Optional[decimal.Decimal] = None grossLendFee: Optional[decimal.Decimal] = None netLendFeeRate: Optional[decimal.Decimal] = None netLendFee: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] = () fromAcct: Optional[str] = None toAcct: Optional[str] = None @dataclass(frozen=True) class Transfer(FlexElement): """ Wrapped in <Transfers> """ type: Optional[enums.TransferType] = None direction: Optional[enums.InOut] = None assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None reportDate: Optional[datetime.date] = None underlyingConid: Optional[str] = None date: Optional[datetime.date] = None dateTime: Optional[datetime.datetime] = None account: Optional[str] = None quantity: Optional[decimal.Decimal] = None transferPrice: Optional[decimal.Decimal] = None positionAmount: 
Optional[decimal.Decimal] = None positionAmountInBase: Optional[decimal.Decimal] = None capitalGainsPnl: Optional[decimal.Decimal] = None cashTransfer: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] = () clientReference: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None sedol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None company: Optional[str] = None accountName: Optional[str] = None pnlAmount: Optional[decimal.Decimal] = None pnlAmountInBase: Optional[decimal.Decimal] = None fxPnl: Optional[decimal.Decimal] = None transactionID: Optional[str] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: Optional[str] = None @dataclass(frozen=True) class UnsettledTransfer(FlexElement): """ Wrapped in <UnsettledTransfers> """ direction: Optional[enums.ToFrom] = None assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None sedol: Optional[str] = None underlyingConid: Optional[str] = None stage: Optional[str] = None tradeDate: Optional[datetime.date] = None targetSettlement: Optional[datetime.date] = None contra: Optional[str] = None quantity: Optional[decimal.Decimal] = None tradePrice: Optional[decimal.Decimal] = None tradeAmount: Optional[decimal.Decimal] = None tradeAmountInBase: Optional[decimal.Decimal] = None transactionID: Optional[str] = None 
@dataclass(frozen=True) class PriorPeriodPosition(FlexElement): """ Wrapped in <PriorPeriodPositions> """ assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None priorMtmPnl: Optional[decimal.Decimal] = None date: Optional[datetime.date] = None price: Optional[decimal.Decimal] = None acctAlias: Optional[str] = None model: Optional[str] = None sedol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None @dataclass(frozen=True) class CorporateAction(FlexElement): """ Wrapped in <CorporateActions> """ assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None actionDescription: Optional[str] = None dateTime: Optional[datetime.datetime] = None amount: Optional[decimal.Decimal] = None quantity: Optional[decimal.Decimal] = None fifoPnlRealized: Optional[decimal.Decimal] = None capitalGainsPnl: 
Optional[decimal.Decimal] = None fxPnl: Optional[decimal.Decimal] = None mtmPnl: Optional[decimal.Decimal] = None # Effective 2010, CorporateAction has a `type` attribute type: Optional[enums.Reorg] = None code: Tuple[enums.Code, ...] = () sedol: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None proceeds: Optional[decimal.Decimal] = None value: Optional[decimal.Decimal] = None transactionID: Optional[str] = None @dataclass(frozen=True) class CashTransaction(FlexElement): """ Wrapped in <CashTransactions> """ type: Optional[enums.CashAction] = None assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None amount: Optional[decimal.Decimal] = None dateTime: Optional[datetime.datetime] = None sedol: Optional[str] = None symbol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None tradeID: Optional[str] = None code: Tuple[enums.Code, ...] 
= () transactionID: Optional[str] = None reportDate: Optional[datetime.date] = None clientReference: Optional[str] = None settleDate: Optional[datetime.date] = None acctAlias: Optional[str] = None model: Optional[str] = None levelOfDetail: Optional[str] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: Optional[str] = None @dataclass(frozen=True) class DebitCardActivity(FlexElement): """ Wrapped in <DebitCardActivities> """ accountId: Optional[str] = None acctAlias: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None assetCategory: Optional[enums.AssetClass] = None status: Optional[str] = None reportDate: Optional[datetime.date] = None postingDate: Optional[datetime.date] = None transactionDateTime: Optional[datetime.datetime] = None category: Optional[str] = None merchantNameLocation: Optional[str] = None amount: Optional[decimal.Decimal] = None model: Optional[str] = None @dataclass(frozen=True) class ChangeInDividendAccrual(FlexElement): """ Wrapped in <ChangeInDividendAccruals> """ date: Optional[datetime.date] = None assetCategory: Optional[enums.AssetClass] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None accountId: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None sedol: Optional[str] = None listingExchange: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None reportDate: Optional[datetime.date] = None underlyingConid: Optional[str] = None exDate: Optional[datetime.date] = None payDate: Optional[datetime.date] = None quantity: Optional[decimal.Decimal] = None tax: Optional[decimal.Decimal] = None fee: Optional[decimal.Decimal] = None grossRate: 
Optional[decimal.Decimal] = None grossAmount: Optional[decimal.Decimal] = None netAmount: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] = () securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None fromAcct: Optional[str] = None toAcct: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None # Type alias to work around https://github.com/python/mypy/issues/1775 _ChangeInDividendAccrual = ChangeInDividendAccrual @dataclass(frozen=True) class OpenDividendAccrual(FlexElement): """ Wrapped in <OpenDividendAccruals> """ assetCategory: Optional[enums.AssetClass] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None accountId: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None exDate: Optional[datetime.date] = None payDate: Optional[datetime.date] = None quantity: Optional[decimal.Decimal] = None tax: Optional[decimal.Decimal] = None fee: Optional[decimal.Decimal] = None grossRate: Optional[decimal.Decimal] = None grossAmount: Optional[decimal.Decimal] = None netAmount: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] 
= () sedol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None fromAcct: Optional[str] = None toAcct: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: Optional[str] = None @dataclass(frozen=True) class SecurityInfo(FlexElement): """ Wrapped in <SecuritiesInfo> """ assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingCategory: Optional[str] = None subCategory: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None maturity: Optional[str] = None issueDate: Optional[datetime.date] = None type: Optional[str] = None sedol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] 
= () currency: Optional[str] = None settlementPolicyMethod: Optional[str] = None @dataclass(frozen=True) class ConversionRate(FlexElement): """ Wrapped in <ConversionRates> """ reportDate: Optional[datetime.date] = None fromCurrency: Optional[str] = None toCurrency: Optional[str] = None rate: Optional[decimal.Decimal] = None @dataclass(frozen=True) class FIFOPerformanceSummaryUnderlying(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None listingExchange: Optional[str] = None assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None realizedSTProfit: Optional[decimal.Decimal] = None realizedSTLoss: Optional[decimal.Decimal] = None realizedLTProfit: Optional[decimal.Decimal] = None realizedLTLoss: Optional[decimal.Decimal] = None totalRealizedPnl: Optional[decimal.Decimal] = None unrealizedProfit: Optional[decimal.Decimal] = None unrealizedLoss: Optional[decimal.Decimal] = None totalUnrealizedPnl: Optional[decimal.Decimal] = None totalFifoPnl: Optional[decimal.Decimal] = None totalRealizedCapitalGainsPnl: Optional[decimal.Decimal] = None totalRealizedFxPnl: Optional[decimal.Decimal] = None totalUnrealizedCapitalGainsPnl: Optional[decimal.Decimal] = None totalUnrealizedFxPnl: Optional[decimal.Decimal] = None totalCapitalGainsPnl: Optional[decimal.Decimal] = None totalFxPnl: Optional[decimal.Decimal] = None transferredPnl: Optional[decimal.Decimal] = None transferredCapitalGainsPnl: Optional[decimal.Decimal] = None transferredFxPnl: Optional[decimal.Decimal] = None sedol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: 
Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None unrealizedSTProfit: Optional[decimal.Decimal] = None unrealizedSTLoss: Optional[decimal.Decimal] = None unrealizedLTProfit: Optional[decimal.Decimal] = None unrealizedLTLoss: Optional[decimal.Decimal] = None costAdj: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] = () serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: Optional[str] = None @dataclass(frozen=True) class NetStockPosition(FlexElement): assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None sedol: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None sharesAtIb: Optional[decimal.Decimal] = None sharesBorrowed: Optional[decimal.Decimal] = None sharesLent: Optional[decimal.Decimal] = None netShares: Optional[decimal.Decimal] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: 
Optional[decimal.Decimal] = None weight: Optional[str] = None @dataclass(frozen=True) class ClientFee(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None feeType: Optional[str] = None date: Optional[datetime.datetime] = None description: Optional[str] = None expenseIndicator: Optional[str] = None revenue: Optional[decimal.Decimal] = None expense: Optional[decimal.Decimal] = None net: Optional[decimal.Decimal] = None revenueInBase: Optional[decimal.Decimal] = None expenseInBase: Optional[decimal.Decimal] = None netInBase: Optional[decimal.Decimal] = None tradeID: Optional[str] = None execID: Optional[str] = None levelOfDetail: Optional[str] = None @dataclass(frozen=True) class ClientFeesDetail(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None date: Optional[datetime.datetime] = None tradeID: Optional[str] = None execID: Optional[str] = None totalRevenue: Optional[decimal.Decimal] = None totalCommission: Optional[decimal.Decimal] = None brokerExecutionCharge: Optional[decimal.Decimal] = None clearingCharge: Optional[decimal.Decimal] = None thirdPartyExecutionCharge: Optional[decimal.Decimal] = None thirdPartyRegulatoryCharge: Optional[decimal.Decimal] = None regFINRATradingActivityFee: Optional[decimal.Decimal] = None regSection31TransactionFee: Optional[decimal.Decimal] = None regOther: Optional[decimal.Decimal] = None totalNet: Optional[decimal.Decimal] = None totalNetInBase: Optional[decimal.Decimal] = None levelOfDetail: Optional[str] = None other: Optional[decimal.Decimal] = None @dataclass(frozen=True) class TransactionTax(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = 
None assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None date: Optional[datetime.datetime] = None taxDescription: Optional[str] = None quantity: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None taxAmount: Optional[decimal.Decimal] = None tradeId: Optional[str] = None tradePrice: Optional[decimal.Decimal] = None source: Optional[str] = None code: Tuple[enums.Code, ...] 
= () levelOfDetail: Optional[str] = None @dataclass(frozen=True) class TransactionTaxDetail(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None date: Optional[datetime.datetime] = None taxDescription: Optional[str] = None quantity: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None taxAmount: Optional[decimal.Decimal] = None tradeId: Optional[str] = None tradePrice: Optional[decimal.Decimal] = None source: Optional[str] = None code: Tuple[enums.Code, ...] 
= () levelOfDetail: Optional[str] = None @dataclass(frozen=True) class SalesTax(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None date: Optional[datetime.date] = None country: Optional[str] = None taxType: Optional[str] = None payer: Optional[str] = None taxableDescription: Optional[str] = None taxableAmount: Optional[decimal.Decimal] = None taxRate: Optional[decimal.Decimal] = None salesTax: Optional[decimal.Decimal] = None taxableTransactionID: Optional[str] = None transactionID: Optional[str] = None code: Tuple[enums.Code, ...] = () # Type alias to work around https://github.com/python/mypy/issues/1775 _ClientFeesDetail = ClientFeesDetail
41.891189
86
0.718875
from __future__ import annotations __all__ = [ "FlexElement", "FlexQueryResponse", "FlexStatement", "AccountInformation", "ChangeInNAV", "MTMPerformanceSummaryUnderlying", "EquitySummaryByReportDateInBase", "MTDYTDPerformanceSummaryUnderlying", "CashReportCurrency", "FIFOPerformanceSummaryUnderlying", "NetStockPosition", "UnsettledTransfer", "UnbundledCommissionDetail", "StatementOfFundsLine", "ChangeInPositionValue", "OpenPosition", "FxLot", "Trade", "TradeConfirm", "OptionEAE", "TradeTransfer", "TierInterestDetail", "HardToBorrowDetail", "InterestAccrualsCurrency", "SLBActivity", "Transfer", "CorporateAction", "CashTransaction", "ChangeInDividendAccrual", "OpenDividendAccrual", "SecurityInfo", "ConversionRate", "PriorPeriodPosition", "ClientFee", "ClientFeesDetail", "SalesTax", "DebitCardActivity", "SymbolSummary", "Order" ] import datetime import decimal from dataclasses import dataclass, astuple from typing import Tuple, Optional from ibflex import enums @dataclass(frozen=True) class FlexElement: def __iter__(self): return iter(astuple(self)) def items(self): for attr, val in self.__dict__.items(): yield attr, val @dataclass(frozen=True) class FlexQueryResponse(FlexElement): queryName: str type: str FlexStatements: Tuple["FlexStatement", ...] def __repr__(self): repr = ( f"{type(self).__name__}(" f"queryName={self.queryName!r}, " f"type={self.type!r}, " f"len(FlexStatements)={len(self.FlexStatements)}" ")" ) return repr @dataclass(frozen=True) class FlexStatement(FlexElement): accountId: str fromDate: datetime.date toDate: datetime.date period: str whenGenerated: datetime.datetime AccountInformation: Optional["_AccountInformation"] = None ChangeInNAV: Optional["_ChangeInNAV"] = None CashReport: Tuple["CashReportCurrency", ...] = () MTDYTDPerformanceSummary: Tuple["MTDYTDPerformanceSummaryUnderlying", ...] = () MTMPerformanceSummaryInBase: Tuple["MTMPerformanceSummaryUnderlying", ...] = () EquitySummaryInBase: Tuple["EquitySummaryByReportDateInBase", ...] 
= () FIFOPerformanceSummaryInBase: Tuple["FIFOPerformanceSummaryUnderlying", ...] = () FdicInsuredDepositsByBank: Tuple = () StmtFunds: Tuple["StatementOfFundsLine", ...] = () ChangeInPositionValues: Tuple["ChangeInPositionValue", ...] = () OpenPositions: Tuple["OpenPosition", ...] = () NetStockPositionSummary: Tuple["NetStockPosition", ...] = () ComplexPositions: Tuple = () FxPositions: Tuple["FxLot", ...] = () Trades: Tuple["Trade", ...] = () HKIPOSubscriptionActivity: Tuple = () TradeConfirms: Tuple["TradeConfirm", ...] = () TransactionTaxes: Tuple = () OptionEAE: Tuple["_OptionEAE", ...] = () PendingExcercises: Tuple = () TradeTransfers: Tuple["TradeTransfer", ...] = () FxTransactions: Tuple = () UnbookedTrades: Tuple = () RoutingCommissions: Tuple = () IBGNoteTransactions: Tuple = () UnsettledTransfers: Tuple["UnsettledTransfer", ...] = () UnbundledCommissionDetails: Tuple["UnbundledCommissionDetail", ...] = () Adjustments: Tuple = () PriorPeriodPositions: Tuple["PriorPeriodPosition", ...] = () CorporateActions: Tuple["CorporateAction", ...] = () ClientFees: Tuple["ClientFee", ...] = () ClientFeesDetail: Tuple["_ClientFeesDetail", ...] = () DebitCardActivities: Tuple["DebitCardActivity", ...] = () SoftDollars: Tuple = () CashTransactions: Tuple["CashTransaction", ...] = () SalesTaxes: Tuple["SalesTax", ...] = () CFDCharges: Tuple = () InterestAccruals: Tuple["InterestAccrualsCurrency", ...] = () TierInterestDetails: Tuple["TierInterestDetail", ...] = () HardToBorrowDetails: Tuple["HardToBorrowDetail", ...] = () HardToBorrowMarkupDetails: Tuple = () SLBOpenContracts: Tuple = () SLBActivities: Tuple["SLBActivity", ...] = () SLBFees: Tuple["SLBFee", ...] = () Transfers: Tuple["Transfer", ...] = () ChangeInDividendAccruals: Tuple["_ChangeInDividendAccrual", ...] = () OpenDividendAccruals: Tuple["OpenDividendAccrual", ...] = () SecuritiesInfo: Tuple["SecurityInfo", ...] = () ConversionRates: Tuple["ConversionRate", ...] 
= () HKIPOOpenSubscriptions: Tuple = () CommissionCredits: Tuple = () StockGrantActivities: Tuple = () def __repr__(self): repr = ( f"{type(self).__name__}(" f"accountId={self.accountId!r}, " f"fromDate={self.fromDate!r}, " f"toDate={self.toDate!r}, " f"period={self.period!r}, " f"whenGenerated={self.whenGenerated!r}" ) sequences = ( (k, getattr(self, k)) for k, v in self.__annotations__.items() if hasattr(v, "__origin__") and v.__origin__ is tuple ) nonempty_sequences = ", ".join( f"len({name})={len(value)}" for (name, value) in sequences if value ) if nonempty_sequences: repr += ", " for seq in nonempty_sequences: repr += seq repr += ")" return repr @dataclass(frozen=True) class AccountInformation(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None name: Optional[str] = None accountType: Optional[str] = None customerType: Optional[str] = None accountCapabilities: Tuple[str, ...] = () tradingPermissions: Tuple[str, ...] 
= () registeredRepName: Optional[str] = None registeredRepPhone: Optional[str] = None dateOpened: Optional[datetime.date] = None dateFunded: Optional[datetime.date] = None dateClosed: Optional[datetime.date] = None street: Optional[str] = None street2: Optional[str] = None city: Optional[str] = None state: Optional[str] = None country: Optional[str] = None postalCode: Optional[str] = None streetResidentialAddress: Optional[str] = None street2ResidentialAddress: Optional[str] = None cityResidentialAddress: Optional[str] = None stateResidentialAddress: Optional[str] = None countryResidentialAddress: Optional[str] = None postalCodeResidentialAddress: Optional[str] = None masterName: Optional[str] = None ibEntity: Optional[str] = None primaryEmail: Optional[str] = None accountRepName: Optional[str] = None accountRepPhone: Optional[str] = None _AccountInformation = AccountInformation @dataclass(frozen=True) class ChangeInNAV(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None fromDate: Optional[datetime.date] = None toDate: Optional[datetime.date] = None startingValue: Optional[decimal.Decimal] = None mtm: Optional[decimal.Decimal] = None realized: Optional[decimal.Decimal] = None changeInUnrealized: Optional[decimal.Decimal] = None costAdjustments: Optional[decimal.Decimal] = None transferredPnlAdjustments: Optional[decimal.Decimal] = None depositsWithdrawals: Optional[decimal.Decimal] = None internalCashTransfers: Optional[decimal.Decimal] = None assetTransfers: Optional[decimal.Decimal] = None debitCardActivity: Optional[decimal.Decimal] = None billPay: Optional[decimal.Decimal] = None dividends: Optional[decimal.Decimal] = None withholdingTax: Optional[decimal.Decimal] = None withholding871m: Optional[decimal.Decimal] = None withholdingTaxCollected: Optional[decimal.Decimal] = None changeInDividendAccruals: Optional[decimal.Decimal] = None interest: Optional[decimal.Decimal] = None changeInInterestAccruals: 
Optional[decimal.Decimal] = None advisorFees: Optional[decimal.Decimal] = None brokerFees: Optional[decimal.Decimal] = None changeInBrokerFeeAccruals: Optional[decimal.Decimal] = None clientFees: Optional[decimal.Decimal] = None otherFees: Optional[decimal.Decimal] = None feesReceivables: Optional[decimal.Decimal] = None commissions: Optional[decimal.Decimal] = None commissionReceivables: Optional[decimal.Decimal] = None forexCommissions: Optional[decimal.Decimal] = None transactionTax: Optional[decimal.Decimal] = None taxReceivables: Optional[decimal.Decimal] = None salesTax: Optional[decimal.Decimal] = None softDollars: Optional[decimal.Decimal] = None netFxTrading: Optional[decimal.Decimal] = None fxTranslation: Optional[decimal.Decimal] = None linkingAdjustments: Optional[decimal.Decimal] = None other: Optional[decimal.Decimal] = None endingValue: Optional[decimal.Decimal] = None twr: Optional[decimal.Decimal] = None corporateActionProceeds: Optional[decimal.Decimal] = None commissionCreditsRedemption: Optional[decimal.Decimal] = None grantActivity: Optional[decimal.Decimal] = None excessFundSweep: Optional[decimal.Decimal] = None billableSalesTax: Optional[decimal.Decimal] = None _ChangeInNAV = ChangeInNAV @dataclass(frozen=True) class MTMPerformanceSummaryUnderlying(FlexElement): assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None sedol: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: 
Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None prevCloseQuantity: Optional[decimal.Decimal] = None prevClosePrice: Optional[decimal.Decimal] = None closeQuantity: Optional[decimal.Decimal] = None closePrice: Optional[decimal.Decimal] = None transactionMtm: Optional[decimal.Decimal] = None priorOpenMtm: Optional[decimal.Decimal] = None commissions: Optional[decimal.Decimal] = None other: Optional[decimal.Decimal] = None total: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] = () corpActionMtm: Optional[decimal.Decimal] = None dividends: Optional[decimal.Decimal] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: Optional[str] = None otherWithAccruals: Optional[decimal.Decimal] = None totalWithAccruals: Optional[decimal.Decimal] = None @dataclass(frozen=True) class EquitySummaryByReportDateInBase(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None reportDate: Optional[datetime.date] = None cash: Optional[decimal.Decimal] = None cashLong: Optional[decimal.Decimal] = None cashShort: Optional[decimal.Decimal] = None slbCashCollateral: Optional[decimal.Decimal] = None slbCashCollateralLong: Optional[decimal.Decimal] = None slbCashCollateralShort: Optional[decimal.Decimal] = None stock: Optional[decimal.Decimal] = None stockLong: Optional[decimal.Decimal] = None stockShort: Optional[decimal.Decimal] = None slbDirectSecuritiesBorrowed: Optional[decimal.Decimal] = None slbDirectSecuritiesBorrowedLong: Optional[decimal.Decimal] = None slbDirectSecuritiesBorrowedShort: Optional[decimal.Decimal] = None slbDirectSecuritiesLent: Optional[decimal.Decimal] = None slbDirectSecuritiesLentLong: Optional[decimal.Decimal] = None 
slbDirectSecuritiesLentShort: Optional[decimal.Decimal] = None options: Optional[decimal.Decimal] = None optionsLong: Optional[decimal.Decimal] = None optionsShort: Optional[decimal.Decimal] = None bonds: Optional[decimal.Decimal] = None bondsLong: Optional[decimal.Decimal] = None bondsShort: Optional[decimal.Decimal] = None bondInterestAccrualsComponent: Optional[decimal.Decimal] = None bondInterestAccrualsComponentLong: Optional[decimal.Decimal] = None bondInterestAccrualsComponentShort: Optional[decimal.Decimal] = None notes: Optional[decimal.Decimal] = None notesLong: Optional[decimal.Decimal] = None notesShort: Optional[decimal.Decimal] = None interestAccruals: Optional[decimal.Decimal] = None interestAccrualsLong: Optional[decimal.Decimal] = None interestAccrualsShort: Optional[decimal.Decimal] = None softDollars: Optional[decimal.Decimal] = None softDollarsLong: Optional[decimal.Decimal] = None softDollarsShort: Optional[decimal.Decimal] = None dividendAccruals: Optional[decimal.Decimal] = None dividendAccrualsLong: Optional[decimal.Decimal] = None dividendAccrualsShort: Optional[decimal.Decimal] = None total: Optional[decimal.Decimal] = None totalLong: Optional[decimal.Decimal] = None totalShort: Optional[decimal.Decimal] = None commodities: Optional[decimal.Decimal] = None commoditiesLong: Optional[decimal.Decimal] = None commoditiesShort: Optional[decimal.Decimal] = None funds: Optional[decimal.Decimal] = None fundsLong: Optional[decimal.Decimal] = None fundsShort: Optional[decimal.Decimal] = None forexCfdUnrealizedPl: Optional[decimal.Decimal] = None forexCfdUnrealizedPlLong: Optional[decimal.Decimal] = None forexCfdUnrealizedPlShort: Optional[decimal.Decimal] = None brokerInterestAccrualsComponent: Optional[decimal.Decimal] = None brokerCashComponent: Optional[decimal.Decimal] = None brokerFeesAccrualsComponent: Optional[decimal.Decimal] = None brokerFeesAccrualsComponentLong: Optional[decimal.Decimal] = None brokerFeesAccrualsComponentShort: 
Optional[decimal.Decimal] = None cfdUnrealizedPl: Optional[decimal.Decimal] = None fdicInsuredBankSweepAccount: Optional[decimal.Decimal] = None fdicInsuredBankSweepAccountLong: Optional[decimal.Decimal] = None fdicInsuredBankSweepAccountShort: Optional[decimal.Decimal] = None fdicInsuredBankSweepAccountCashComponent: Optional[decimal.Decimal] = None fdicInsuredBankSweepAccountCashComponentLong: Optional[decimal.Decimal] = None fdicInsuredBankSweepAccountCashComponentShort: Optional[decimal.Decimal] = None fdicInsuredAccountInterestAccruals: Optional[decimal.Decimal] = None fdicInsuredAccountInterestAccrualsLong: Optional[decimal.Decimal] = None fdicInsuredAccountInterestAccrualsShort: Optional[decimal.Decimal] = None fdicInsuredAccountInterestAccrualsComponent: Optional[decimal.Decimal] = None fdicInsuredAccountInterestAccrualsComponentLong: Optional[decimal.Decimal] = None fdicInsuredAccountInterestAccrualsComponentShort: Optional[decimal.Decimal] = None brokerCashComponentLong: Optional[decimal.Decimal] = None brokerCashComponentShort: Optional[decimal.Decimal] = None brokerInterestAccrualsComponentLong: Optional[decimal.Decimal] = None brokerInterestAccrualsComponentShort: Optional[decimal.Decimal] = None cfdUnrealizedPlLong: Optional[decimal.Decimal] = None cfdUnrealizedPlShort: Optional[decimal.Decimal] = None ipoSubscription: Optional[decimal.Decimal] = None ipoSubscriptionLong: Optional[decimal.Decimal] = None ipoSubscriptionShort: Optional[decimal.Decimal] = None physDel: Optional[decimal.Decimal] = None physDelLong: Optional[decimal.Decimal] = None physDelShort: Optional[decimal.Decimal] = None @dataclass(frozen=True) class MTDYTDPerformanceSummaryUnderlying(FlexElement): assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = 
None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None mtmMTD: Optional[decimal.Decimal] = None mtmYTD: Optional[decimal.Decimal] = None realSTMTD: Optional[decimal.Decimal] = None realSTYTD: Optional[decimal.Decimal] = None realLTMTD: Optional[decimal.Decimal] = None realLTYTD: Optional[decimal.Decimal] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None realizedPnlMTD: Optional[decimal.Decimal] = None realizedCapitalGainsPnlMTD: Optional[decimal.Decimal] = None realizedFxPnlMTD: Optional[decimal.Decimal] = None realizedPnlYTD: Optional[decimal.Decimal] = None realizedCapitalGainsPnlYTD: Optional[decimal.Decimal] = None realizedFxPnlYTD: Optional[decimal.Decimal] = None brokerFees: Optional[decimal.Decimal] = None brokerFeesSec: Optional[decimal.Decimal] = None brokerFeesCom: Optional[decimal.Decimal] = None brokerFeesMTD: Optional[decimal.Decimal] = None brokerFeesYTD: Optional[decimal.Decimal] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: Optional[str] = None @dataclass(frozen=True) class CashReportCurrency(FlexElement): accountId: Optional[str] = None currency: Optional[str] = None fromDate: Optional[datetime.date] = None toDate: Optional[datetime.date] = None startingCash: Optional[decimal.Decimal] = None startingCashSec: Optional[decimal.Decimal] = None startingCashCom: Optional[decimal.Decimal] = None clientFees: Optional[decimal.Decimal] = None clientFeesSec: Optional[decimal.Decimal] = None clientFeesCom: 
Optional[decimal.Decimal] = None commissions: Optional[decimal.Decimal] = None commissionsSec: Optional[decimal.Decimal] = None commissionsCom: Optional[decimal.Decimal] = None billableCommissions: Optional[decimal.Decimal] = None billableCommissionsSec: Optional[decimal.Decimal] = None billableCommissionsCom: Optional[decimal.Decimal] = None depositWithdrawals: Optional[decimal.Decimal] = None depositWithdrawalsSec: Optional[decimal.Decimal] = None depositWithdrawalsCom: Optional[decimal.Decimal] = None deposits: Optional[decimal.Decimal] = None depositsSec: Optional[decimal.Decimal] = None depositsCom: Optional[decimal.Decimal] = None withdrawals: Optional[decimal.Decimal] = None withdrawalsSec: Optional[decimal.Decimal] = None withdrawalsCom: Optional[decimal.Decimal] = None accountTransfers: Optional[decimal.Decimal] = None accountTransfersSec: Optional[decimal.Decimal] = None accountTransfersCom: Optional[decimal.Decimal] = None internalTransfers: Optional[decimal.Decimal] = None internalTransfersSec: Optional[decimal.Decimal] = None internalTransfersCom: Optional[decimal.Decimal] = None dividends: Optional[decimal.Decimal] = None dividendsSec: Optional[decimal.Decimal] = None dividendsCom: Optional[decimal.Decimal] = None brokerFees: Optional[decimal.Decimal] = None brokerFeesSec: Optional[decimal.Decimal] = None brokerFeesCom: Optional[decimal.Decimal] = None brokerFeesMTD: Optional[decimal.Decimal] = None brokerFeesYTD: Optional[decimal.Decimal] = None brokerInterest: Optional[decimal.Decimal] = None brokerInterestSec: Optional[decimal.Decimal] = None brokerInterestCom: Optional[decimal.Decimal] = None bondInterest: Optional[decimal.Decimal] = None bondInterestSec: Optional[decimal.Decimal] = None bondInterestCom: Optional[decimal.Decimal] = None cashSettlingMtm: Optional[decimal.Decimal] = None cashSettlingMtmSec: Optional[decimal.Decimal] = None cashSettlingMtmCom: Optional[decimal.Decimal] = None cfdCharges: Optional[decimal.Decimal] = None 
cfdChargesSec: Optional[decimal.Decimal] = None cfdChargesCom: Optional[decimal.Decimal] = None netTradesSales: Optional[decimal.Decimal] = None netTradesSalesSec: Optional[decimal.Decimal] = None netTradesSalesCom: Optional[decimal.Decimal] = None netTradesPurchases: Optional[decimal.Decimal] = None netTradesPurchasesSec: Optional[decimal.Decimal] = None netTradesPurchasesCom: Optional[decimal.Decimal] = None feesReceivables: Optional[decimal.Decimal] = None feesReceivablesSec: Optional[decimal.Decimal] = None feesReceivablesCom: Optional[decimal.Decimal] = None paymentInLieu: Optional[decimal.Decimal] = None paymentInLieuSec: Optional[decimal.Decimal] = None paymentInLieuCom: Optional[decimal.Decimal] = None transactionTax: Optional[decimal.Decimal] = None transactionTaxSec: Optional[decimal.Decimal] = None transactionTaxCom: Optional[decimal.Decimal] = None withholdingTax: Optional[decimal.Decimal] = None withholdingTaxSec: Optional[decimal.Decimal] = None withholdingTaxCom: Optional[decimal.Decimal] = None fxTranslationGainLoss: Optional[decimal.Decimal] = None fxTranslationGainLossSec: Optional[decimal.Decimal] = None fxTranslationGainLossCom: Optional[decimal.Decimal] = None otherFees: Optional[decimal.Decimal] = None otherFeesSec: Optional[decimal.Decimal] = None otherFeesCom: Optional[decimal.Decimal] = None endingCash: Optional[decimal.Decimal] = None endingCashSec: Optional[decimal.Decimal] = None endingCashCom: Optional[decimal.Decimal] = None endingSettledCash: Optional[decimal.Decimal] = None endingSettledCashSec: Optional[decimal.Decimal] = None endingSettledCashCom: Optional[decimal.Decimal] = None clientFeesMTD: Optional[decimal.Decimal] = None clientFeesYTD: Optional[decimal.Decimal] = None commissionsMTD: Optional[decimal.Decimal] = None commissionsYTD: Optional[decimal.Decimal] = None billableCommissionsMTD: Optional[decimal.Decimal] = None billableCommissionsYTD: Optional[decimal.Decimal] = None depositWithdrawalsMTD: Optional[decimal.Decimal] = 
None depositWithdrawalsYTD: Optional[decimal.Decimal] = None depositsMTD: Optional[decimal.Decimal] = None depositsYTD: Optional[decimal.Decimal] = None withdrawalsMTD: Optional[decimal.Decimal] = None withdrawalsYTD: Optional[decimal.Decimal] = None accountTransfersMTD: Optional[decimal.Decimal] = None accountTransfersYTD: Optional[decimal.Decimal] = None internalTransfersMTD: Optional[decimal.Decimal] = None internalTransfersYTD: Optional[decimal.Decimal] = None excessFundSweep: Optional[decimal.Decimal] = None excessFundSweepSec: Optional[decimal.Decimal] = None excessFundSweepCom: Optional[decimal.Decimal] = None excessFundSweepMTD: Optional[decimal.Decimal] = None excessFundSweepYTD: Optional[decimal.Decimal] = None dividendsMTD: Optional[decimal.Decimal] = None dividendsYTD: Optional[decimal.Decimal] = None insuredDepositInterestMTD: Optional[decimal.Decimal] = None insuredDepositInterestYTD: Optional[decimal.Decimal] = None brokerInterestMTD: Optional[decimal.Decimal] = None brokerInterestYTD: Optional[decimal.Decimal] = None bondInterestMTD: Optional[decimal.Decimal] = None bondInterestYTD: Optional[decimal.Decimal] = None cashSettlingMtmMTD: Optional[decimal.Decimal] = None cashSettlingMtmYTD: Optional[decimal.Decimal] = None realizedVmMTD: Optional[decimal.Decimal] = None realizedVmYTD: Optional[decimal.Decimal] = None cfdChargesMTD: Optional[decimal.Decimal] = None cfdChargesYTD: Optional[decimal.Decimal] = None netTradesSalesMTD: Optional[decimal.Decimal] = None netTradesSalesYTD: Optional[decimal.Decimal] = None advisorFeesMTD: Optional[decimal.Decimal] = None advisorFeesYTD: Optional[decimal.Decimal] = None feesReceivablesMTD: Optional[decimal.Decimal] = None feesReceivablesYTD: Optional[decimal.Decimal] = None netTradesPurchasesMTD: Optional[decimal.Decimal] = None netTradesPurchasesYTD: Optional[decimal.Decimal] = None paymentInLieuMTD: Optional[decimal.Decimal] = None paymentInLieuYTD: Optional[decimal.Decimal] = None transactionTaxMTD: 
Optional[decimal.Decimal] = None transactionTaxYTD: Optional[decimal.Decimal] = None taxReceivablesMTD: Optional[decimal.Decimal] = None taxReceivablesYTD: Optional[decimal.Decimal] = None withholdingTaxMTD: Optional[decimal.Decimal] = None withholdingTaxYTD: Optional[decimal.Decimal] = None withholding871mMTD: Optional[decimal.Decimal] = None withholding871mYTD: Optional[decimal.Decimal] = None withholdingCollectedTaxMTD: Optional[decimal.Decimal] = None withholdingCollectedTaxYTD: Optional[decimal.Decimal] = None salesTaxMTD: Optional[decimal.Decimal] = None salesTaxYTD: Optional[decimal.Decimal] = None otherFeesMTD: Optional[decimal.Decimal] = None otherFeesYTD: Optional[decimal.Decimal] = None acctAlias: Optional[str] = None model: Optional[str] = None avgCreditBalance: Optional[decimal.Decimal] = None avgCreditBalanceSec: Optional[decimal.Decimal] = None avgCreditBalanceCom: Optional[decimal.Decimal] = None avgDebitBalance: Optional[decimal.Decimal] = None avgDebitBalanceSec: Optional[decimal.Decimal] = None avgDebitBalanceCom: Optional[decimal.Decimal] = None linkingAdjustments: Optional[decimal.Decimal] = None linkingAdjustmentsSec: Optional[decimal.Decimal] = None linkingAdjustmentsCom: Optional[decimal.Decimal] = None insuredDepositInterest: Optional[decimal.Decimal] = None insuredDepositInterestSec: Optional[decimal.Decimal] = None insuredDepositInterestCom: Optional[decimal.Decimal] = None realizedVm: Optional[decimal.Decimal] = None realizedVmSec: Optional[decimal.Decimal] = None realizedVmCom: Optional[decimal.Decimal] = None advisorFees: Optional[decimal.Decimal] = None advisorFeesSec: Optional[decimal.Decimal] = None advisorFeesCom: Optional[decimal.Decimal] = None taxReceivables: Optional[decimal.Decimal] = None taxReceivablesSec: Optional[decimal.Decimal] = None taxReceivablesCom: Optional[decimal.Decimal] = None withholding871m: Optional[decimal.Decimal] = None withholding871mSec: Optional[decimal.Decimal] = None withholding871mCom: 
Optional[decimal.Decimal] = None withholdingCollectedTax: Optional[decimal.Decimal] = None withholdingCollectedTaxSec: Optional[decimal.Decimal] = None withholdingCollectedTaxCom: Optional[decimal.Decimal] = None salesTax: Optional[decimal.Decimal] = None salesTaxSec: Optional[decimal.Decimal] = None salesTaxCom: Optional[decimal.Decimal] = None other: Optional[decimal.Decimal] = None otherSec: Optional[decimal.Decimal] = None otherCom: Optional[decimal.Decimal] = None levelOfDetail: Optional[str] = None debitCardActivity: Optional[decimal.Decimal] = None debitCardActivitySec: Optional[decimal.Decimal] = None debitCardActivityCom: Optional[decimal.Decimal] = None debitCardActivityMTD: Optional[decimal.Decimal] = None debitCardActivityYTD: Optional[decimal.Decimal] = None billPay: Optional[decimal.Decimal] = None billPaySec: Optional[decimal.Decimal] = None billPayCom: Optional[decimal.Decimal] = None billPayMTD: Optional[decimal.Decimal] = None billPayYTD: Optional[decimal.Decimal] = None realizedForexVm: Optional[decimal.Decimal] = None realizedForexVmSec: Optional[decimal.Decimal] = None realizedForexVmCom: Optional[decimal.Decimal] = None realizedForexVmMTD: Optional[decimal.Decimal] = None realizedForexVmYTD: Optional[decimal.Decimal] = None ipoSubscription: Optional[decimal.Decimal] = None ipoSubscriptionSec: Optional[decimal.Decimal] = None ipoSubscriptionCom: Optional[decimal.Decimal] = None ipoSubscriptionMTD: Optional[decimal.Decimal] = None ipoSubscriptionYTD: Optional[decimal.Decimal] = None billableSalesTax: Optional[decimal.Decimal] = None billableSalesTaxSec: Optional[decimal.Decimal] = None billableSalesTaxCom: Optional[decimal.Decimal] = None billableSalesTaxMTD: Optional[decimal.Decimal] = None billableSalesTaxYTD: Optional[decimal.Decimal] = None commissionCreditsRedemption: Optional[decimal.Decimal] = None commissionCreditsRedemptionSec: Optional[decimal.Decimal] = None commissionCreditsRedemptionCom: Optional[decimal.Decimal] = None 
commissionCreditsRedemptionMTD: Optional[decimal.Decimal] = None commissionCreditsRedemptionYTD: Optional[decimal.Decimal] = None @dataclass(frozen=True) class StatementOfFundsLine(FlexElement): assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None balance: Optional[decimal.Decimal] = None debit: Optional[decimal.Decimal] = None credit: Optional[decimal.Decimal] = None currency: Optional[str] = None tradeID: Optional[str] = None date: Optional[datetime.datetime] = None reportDate: Optional[datetime.date] = None activityDescription: Optional[str] = None amount: Optional[decimal.Decimal] = None buySell: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None fxRateToBase: Optional[decimal.Decimal] = None listingExchange: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None settleDate: Optional[datetime.date] = None activityCode: Optional[str] = None orderID: Optional[str] = None tradeQuantity: Optional[decimal.Decimal] = None tradePrice: Optional[decimal.Decimal] = None tradeGross: Optional[decimal.Decimal] = None tradeCommission: Optional[decimal.Decimal] = None tradeTax: Optional[decimal.Decimal] = None tradeCode: Optional[str] = None levelOfDetail: Optional[str] = None transactionID: Optional[str] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: 
# NOTE(review): the physical newlines of this region were destroyed (many
# statements per line; '#' comments swallowing the code after them).  The
# one-field-per-line layout below is reconstructed; every field name, type,
# and default is preserved token-for-token from the original.

# Residue: tail of an element truncated at the top of this chunk (its field
# name and class header lie above this view — reconcile with preceding lines).
Optional[decimal.Decimal] = None
weight: Optional[str] = None


@dataclass(frozen=True)
class ChangeInPositionValue(FlexElement):
    """`ChangeInPositionValue` Flex element; all fields optional."""
    assetCategory: Optional[enums.AssetClass] = None
    currency: Optional[str] = None
    priorPeriodValue: Optional[decimal.Decimal] = None
    transactions: Optional[decimal.Decimal] = None
    mtmPriorPeriodPositions: Optional[decimal.Decimal] = None
    mtmTransactions: Optional[decimal.Decimal] = None
    corporateActions: Optional[decimal.Decimal] = None
    accountTransfers: Optional[decimal.Decimal] = None
    fxTranslationPnl: Optional[decimal.Decimal] = None
    futurePriceAdjustments: Optional[decimal.Decimal] = None
    settledCash: Optional[decimal.Decimal] = None
    endOfPeriodValue: Optional[decimal.Decimal] = None
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    other: Optional[decimal.Decimal] = None
    linkingAdjustments: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class OpenPosition(FlexElement):
    """`OpenPosition` Flex element; all fields optional."""
    side: Optional[enums.LongShort] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    reportDate: Optional[datetime.date] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    position: Optional[decimal.Decimal] = None
    markPrice: Optional[decimal.Decimal] = None
    positionValue: Optional[decimal.Decimal] = None
    openPrice: Optional[decimal.Decimal] = None
    costBasisPrice: Optional[decimal.Decimal] = None
    costBasisMoney: Optional[decimal.Decimal] = None
    fifoPnlUnrealized: Optional[decimal.Decimal] = None
    levelOfDetail: Optional[str] = None
    openDateTime: Optional[datetime.datetime] = None
    holdingPeriodDateTime: Optional[datetime.datetime] = None
    securityIDType: Optional[str] = None
    issuer: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    code: Tuple[enums.Code, ...] = ()
    originatingOrderID: Optional[str] = None
    originatingTransactionID: Optional[str] = None
    # NOTE(review): `accruedInt` typed str here, unlike Decimal elsewhere —
    # preserved as found; confirm against the Flex schema.
    accruedInt: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    sedol: Optional[str] = None
    percentOfNAV: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    listingExchange: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    positionValueInBase: Optional[decimal.Decimal] = None
    unrealizedCapitalGainsPnl: Optional[decimal.Decimal] = None
    # Attribute spelling `unrealizedlFxPnl` preserved verbatim (interface).
    unrealizedlFxPnl: Optional[decimal.Decimal] = None
    vestingDate: Optional[datetime.date] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None


@dataclass(frozen=True)
class FxLot(FlexElement):
    """`FxLot` Flex element; all fields optional."""
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    functionalCurrency: Optional[str] = None
    fxCurrency: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    costPrice: Optional[decimal.Decimal] = None
    costBasis: Optional[decimal.Decimal] = None
    closePrice: Optional[decimal.Decimal] = None
    value: Optional[decimal.Decimal] = None
    unrealizedPL: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    lotDescription: Optional[str] = None
    lotOpenDateTime: Optional[datetime.datetime] = None
    levelOfDetail: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None


@dataclass(frozen=True)
class Trade(FlexElement):
    """`Trade` Flex element; all fields optional."""
    transactionType: Optional[enums.TradeType] = None
    openCloseIndicator: Optional[enums.OpenClose] = None
    buySell: Optional[enums.BuySell] = None
    orderType: Optional[enums.OrderType] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    tradeID: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    tradeDate: Optional[datetime.date] = None
    tradeTime: Optional[datetime.time] = None
    settleDateTarget: Optional[datetime.date] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    tradeMoney: Optional[decimal.Decimal] = None
    taxes: Optional[decimal.Decimal] = None
    ibCommission: Optional[decimal.Decimal] = None
    ibCommissionCurrency: Optional[str] = None
    netCash: Optional[decimal.Decimal] = None
    netCashInBase: Optional[decimal.Decimal] = None
    closePrice: Optional[decimal.Decimal] = None
    notes: Tuple[enums.Code, ...] = ()
    cost: Optional[decimal.Decimal] = None
    mtmPnl: Optional[decimal.Decimal] = None
    origTradePrice: Optional[decimal.Decimal] = None
    origTradeDate: Optional[datetime.date] = None
    origTradeID: Optional[str] = None
    origOrderID: Optional[str] = None
    openDateTime: Optional[datetime.datetime] = None
    fifoPnlRealized: Optional[decimal.Decimal] = None
    capitalGainsPnl: Optional[decimal.Decimal] = None
    levelOfDetail: Optional[str] = None
    ibOrderID: Optional[str] = None
    orderTime: Optional[datetime.datetime] = None
    changeInPrice: Optional[decimal.Decimal] = None
    changeInQuantity: Optional[decimal.Decimal] = None
    proceeds: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    clearingFirmID: Optional[str] = None
    # Effective 2013, every `Trade` has a `transactionID` attribute that can't
    # be deselected in the Flex query template.
    transactionID: Optional[str] = None
    holdingPeriodDateTime: Optional[datetime.datetime] = None
    ibExecID: Optional[str] = None
    brokerageOrderID: Optional[str] = None
    orderReference: Optional[str] = None
    volatilityOrderLink: Optional[str] = None
    exchOrderId: Optional[str] = None
    extExecID: Optional[str] = None
    traderID: Optional[str] = None
    isAPIOrder: Optional[bool] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    dateTime: Optional[datetime.datetime] = None
    underlyingConid: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    sedol: Optional[str] = None
    whenRealized: Optional[datetime.datetime] = None
    whenReopened: Optional[datetime.datetime] = None
    accruedInt: Optional[decimal.Decimal] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None


@dataclass(frozen=True)
class Lot(FlexElement):
    """`Lot` Flex element (per-lot trade detail); all fields optional."""
    transactionType: Optional[enums.TradeType] = None
    openCloseIndicator: Optional[enums.OpenClose] = None
    buySell: Optional[enums.BuySell] = None
    orderType: Optional[enums.OrderType] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    tradeID: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    tradeDate: Optional[datetime.date] = None
    tradeTime: Optional[datetime.time] = None
    settleDateTarget: Optional[datetime.date] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    tradeMoney: Optional[decimal.Decimal] = None
    taxes: Optional[decimal.Decimal] = None
    ibCommission: Optional[decimal.Decimal] = None
    ibCommissionCurrency: Optional[str] = None
    netCash: Optional[decimal.Decimal] = None
    netCashInBase: Optional[decimal.Decimal] = None
    closePrice: Optional[decimal.Decimal] = None
    notes: Tuple[enums.Code, ...] = ()  # separator = ";"
    cost: Optional[decimal.Decimal] = None
    mtmPnl: Optional[decimal.Decimal] = None
    origTradePrice: Optional[decimal.Decimal] = None
    origTradeDate: Optional[datetime.date] = None
    origTradeID: Optional[str] = None
    origOrderID: Optional[str] = None
    openDateTime: Optional[datetime.datetime] = None
    fifoPnlRealized: Optional[decimal.Decimal] = None
    capitalGainsPnl: Optional[decimal.Decimal] = None
    levelOfDetail: Optional[str] = None
    ibOrderID: Optional[str] = None
    # Despite the name, `orderTime` actually contains date/time data.
    orderTime: Optional[datetime.datetime] = None
    changeInPrice: Optional[decimal.Decimal] = None
    changeInQuantity: Optional[decimal.Decimal] = None
    proceeds: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    clearingFirmID: Optional[str] = None
    # Effective 2013, every `Trade` has a `transactionID` attribute that can't
    # be deselected in the Flex query template.
    transactionID: Optional[str] = None
    holdingPeriodDateTime: Optional[datetime.datetime] = None
    ibExecID: Optional[str] = None
    brokerageOrderID: Optional[str] = None
    orderReference: Optional[str] = None
    volatilityOrderLink: Optional[str] = None
    exchOrderId: Optional[str] = None
    extExecID: Optional[str] = None
    traderID: Optional[str] = None
    isAPIOrder: Optional[bool] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    dateTime: Optional[datetime.datetime] = None
    underlyingConid: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    sedol: Optional[str] = None
    whenRealized: Optional[datetime.datetime] = None
    whenReopened: Optional[datetime.datetime] = None


@dataclass(frozen=True)
class UnbundledCommissionDetail(FlexElement):
    """`UnbundledCommissionDetail` Flex element; all fields optional."""
    buySell: Optional[enums.BuySell] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    sedol: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    dateTime: Optional[datetime.datetime] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    price: Optional[decimal.Decimal] = None
    tradeID: Optional[str] = None
    orderReference: Optional[str] = None
    totalCommission: Optional[decimal.Decimal] = None
    brokerExecutionCharge: Optional[decimal.Decimal] = None
    brokerClearingCharge: Optional[decimal.Decimal] = None
    thirdPartyExecutionCharge: Optional[decimal.Decimal] = None
    thirdPartyClearingCharge: Optional[decimal.Decimal] = None
    thirdPartyRegulatoryCharge: Optional[decimal.Decimal] = None
    regFINRATradingActivityFee: Optional[decimal.Decimal] = None
    regSection31TransactionFee: Optional[decimal.Decimal] = None
    regOther: Optional[decimal.Decimal] = None
    other: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class SymbolSummary(FlexElement):
    """`SymbolSummary` Flex element; all fields optional."""
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    currency: Optional[str] = None
    assetCategory: Optional[enums.AssetClass] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    transactionType: Optional[enums.TradeType] = None
    tradeID: Optional[str] = None
    orderID: Optional[decimal.Decimal] = None
    execID: Optional[str] = None
    brokerageOrderID: Optional[str] = None
    orderReference: Optional[str] = None
    volatilityOrderLink: Optional[str] = None
    clearingFirmID: Optional[str] = None
    origTradePrice: Optional[decimal.Decimal] = None
    origTradeDate: Optional[datetime.date] = None
    origTradeID: Optional[str] = None
    orderTime: Optional[datetime.datetime] = None
    dateTime: Optional[datetime.datetime] = None
    reportDate: Optional[datetime.date] = None
    settleDate: Optional[datetime.date] = None
    tradeDate: Optional[datetime.date] = None
    exchange: Optional[str] = None
    buySell: Optional[enums.BuySell] = None
    quantity: Optional[decimal.Decimal] = None
    price: Optional[decimal.Decimal] = None
    amount: Optional[decimal.Decimal] = None
    proceeds: Optional[decimal.Decimal] = None
    commission: Optional[decimal.Decimal] = None
    brokerExecutionCommission: Optional[decimal.Decimal] = None
    brokerClearingCommission: Optional[decimal.Decimal] = None
    thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
    thirdPartyClearingCommission: Optional[decimal.Decimal] = None
    thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
    otherCommission: Optional[decimal.Decimal] = None
    commissionCurrency: Optional[str] = None
    tax: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    orderType: Optional[enums.OrderType] = None
    levelOfDetail: Optional[str] = None
    traderID: Optional[str] = None
    isAPIOrder: Optional[bool] = None
    allocatedTo: Optional[str] = None
    accruedInt: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class Order(FlexElement):
    """`Order` Flex element; all fields optional."""
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    currency: Optional[str] = None
    assetCategory: Optional[enums.AssetClass] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    transactionType: Optional[enums.TradeType] = None
    tradeID: Optional[str] = None
    orderID: Optional[decimal.Decimal] = None
    execID: Optional[str] = None
    brokerageOrderID: Optional[str] = None
    orderReference: Optional[str] = None
    volatilityOrderLink: Optional[str] = None
    clearingFirmID: Optional[str] = None
    origTradePrice: Optional[decimal.Decimal] = None
    origTradeDate: Optional[datetime.date] = None
    origTradeID: Optional[str] = None
    orderTime: Optional[datetime.datetime] = None
    dateTime: Optional[datetime.datetime] = None
    reportDate: Optional[datetime.date] = None
    settleDate: Optional[datetime.date] = None
    tradeDate: Optional[datetime.date] = None
    exchange: Optional[str] = None
    buySell: Optional[enums.BuySell] = None
    quantity: Optional[decimal.Decimal] = None
    price: Optional[decimal.Decimal] = None
    amount: Optional[decimal.Decimal] = None
    proceeds: Optional[decimal.Decimal] = None
    commission: Optional[decimal.Decimal] = None
    brokerExecutionCommission: Optional[decimal.Decimal] = None
    brokerClearingCommission: Optional[decimal.Decimal] = None
    thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
    thirdPartyClearingCommission: Optional[decimal.Decimal] = None
    thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
    otherCommission: Optional[decimal.Decimal] = None
    commissionCurrency: Optional[str] = None
    tax: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    orderType: Optional[enums.OrderType] = None
    levelOfDetail: Optional[str] = None
    traderID: Optional[str] = None
    isAPIOrder: Optional[bool] = None
    allocatedTo: Optional[str] = None
    accruedInt: Optional[decimal.Decimal] = None
    netCash: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    ibCommission: Optional[decimal.Decimal] = None
    ibOrderID: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    settleDateTarget: Optional[datetime.date] = None
    tradeMoney: Optional[decimal.Decimal] = None
    taxes: Optional[decimal.Decimal] = None
    ibCommissionCurrency: Optional[str] = None
    closePrice: Optional[decimal.Decimal] = None
    openCloseIndicator: Optional[enums.OpenClose] = None
    # NOTE(review): `notes` is a plain str here, unlike Trade/Lot's Code tuple
    # — preserved as found; confirm against the Flex schema.
    notes: Optional[str] = None
    cost: Optional[decimal.Decimal] = None
    fifoPnlRealized: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    mtmPnl: Optional[decimal.Decimal] = None
    origOrderID: Optional[str] = None
    transactionID: Optional[str] = None
    ibExecID: Optional[str] = None
    exchOrderId: Optional[str] = None
    extExecID: Optional[str] = None
    openDateTime: Optional[datetime.datetime] = None
    holdingPeriodDateTime: Optional[datetime.datetime] = None
    whenRealized: Optional[datetime.datetime] = None
    whenReopened: Optional[datetime.datetime] = None
    changeInPrice: Optional[decimal.Decimal] = None
    changeInQuantity: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class TradeConfirm(FlexElement):
    """`TradeConfirm` Flex element; all fields optional."""
    transactionType: Optional[enums.TradeType] = None
    openCloseIndicator: Optional[enums.OpenClose] = None
    buySell: Optional[enums.BuySell] = None
    orderType: Optional[enums.OrderType] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    tradeID: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    tradeDate: Optional[datetime.date] = None
    tradeTime: Optional[datetime.time] = None
    settleDateTarget: Optional[datetime.date] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    tradeMoney: Optional[decimal.Decimal] = None
    proceeds: Optional[decimal.Decimal] = None
    taxes: Optional[decimal.Decimal] = None
    ibCommission: Optional[decimal.Decimal] = None
    ibCommissionCurrency: Optional[str] = None
    netCash: Optional[decimal.Decimal] = None
    closePrice: Optional[decimal.Decimal] = None
    notes: Tuple[enums.Code, ...] = ()
    cost: Optional[decimal.Decimal] = None
    fifoPnlRealized: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    mtmPnl: Optional[decimal.Decimal] = None
    origTradePrice: Optional[decimal.Decimal] = None
    origTradeDate: Optional[datetime.date] = None
    origTradeID: Optional[str] = None
    origOrderID: Optional[str] = None
    clearingFirmID: Optional[str] = None
    transactionID: Optional[str] = None
    openDateTime: Optional[datetime.datetime] = None
    holdingPeriodDateTime: Optional[datetime.datetime] = None
    whenRealized: Optional[datetime.datetime] = None
    whenReopened: Optional[datetime.datetime] = None
    levelOfDetail: Optional[str] = None
    commissionCurrency: Optional[str] = None
    price: Optional[decimal.Decimal] = None
    thirdPartyClearingCommission: Optional[decimal.Decimal] = None
    orderID: Optional[decimal.Decimal] = None
    allocatedTo: Optional[str] = None
    thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
    dateTime: Optional[datetime.datetime] = None
    brokerExecutionCommission: Optional[decimal.Decimal] = None
    thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
    amount: Optional[decimal.Decimal] = None
    otherCommission: Optional[decimal.Decimal] = None
    commission: Optional[decimal.Decimal] = None
    brokerClearingCommission: Optional[decimal.Decimal] = None
    ibOrderID: Optional[str] = None
    ibExecID: Optional[str] = None
    execID: Optional[str] = None
    brokerageOrderID: Optional[str] = None
    orderReference: Optional[str] = None
    volatilityOrderLink: Optional[str] = None
    exchOrderId: Optional[str] = None
    extExecID: Optional[str] = None
    orderTime: Optional[datetime.datetime] = None
    changeInPrice: Optional[decimal.Decimal] = None
    changeInQuantity: Optional[decimal.Decimal] = None
    traderID: Optional[str] = None
    isAPIOrder: Optional[bool] = None
    code: Tuple[enums.Code, ...] = ()
    tax: Optional[decimal.Decimal] = None
    listingExchange: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    settleDate: Optional[datetime.date] = None
    underlyingSecurityID: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    accruedInt: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class OptionEAE(FlexElement):
    """`OptionEAE` Flex element (option exercise/assignment/expiration)."""
    transactionType: Optional[enums.OptionAction] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    date: Optional[datetime.date] = None
    quantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    markPrice: Optional[decimal.Decimal] = None
    proceeds: Optional[decimal.Decimal] = None
    # Attribute spelling `commisionsAndTax` preserved verbatim (interface).
    commisionsAndTax: Optional[decimal.Decimal] = None
    costBasis: Optional[decimal.Decimal] = None
    realizedPnl: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    mtmPnl: Optional[decimal.Decimal] = None
    tradeID: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None


# NOTE(review): alias preserved as found — presumably kept so the class stays
# reachable under a private name despite a name clash elsewhere; confirm.
_OptionEAE = OptionEAE


@dataclass(frozen=True)
class TradeTransfer(FlexElement):
    """`TradeTransfer` Flex element; all fields optional."""
    transactionType: Optional[enums.TradeType] = None
    openCloseIndicator: Optional[enums.OpenClose] = None
    direction: Optional[enums.ToFrom] = None
    deliveredReceived: Optional[enums.DeliveredReceived] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    underlyingConid: Optional[str] = None
    tradeID: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    tradeDate: Optional[datetime.date] = None
    tradeTime: Optional[datetime.time] = None
    settleDateTarget: Optional[datetime.date] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    tradeMoney: Optional[decimal.Decimal] = None
    taxes: Optional[decimal.Decimal] = None
    ibCommission: Optional[decimal.Decimal] = None
    ibCommissionCurrency: Optional[str] = None
    closePrice: Optional[decimal.Decimal] = None
    notes: Tuple[enums.Code, ...] = ()
    cost: Optional[decimal.Decimal] = None
    fifoPnlRealized: Optional[decimal.Decimal] = None
    mtmPnl: Optional[decimal.Decimal] = None
    brokerName: Optional[str] = None
    brokerAccount: Optional[str] = None
    awayBrokerCommission: Optional[decimal.Decimal] = None
    regulatoryFee: Optional[decimal.Decimal] = None
    netTradeMoney: Optional[decimal.Decimal] = None
    netTradeMoneyInBase: Optional[decimal.Decimal] = None
    netTradePrice: Optional[decimal.Decimal] = None
    multiplier: Optional[decimal.Decimal] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    sedol: Optional[str] = None
    securityID: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    proceeds: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    netCash: Optional[decimal.Decimal] = None
    origTradePrice: Optional[decimal.Decimal] = None
    origTradeDate: Optional[datetime.date] = None
    origTradeID: Optional[str] = None
    origOrderID: Optional[str] = None
    clearingFirmID: Optional[str] = None
    transactionID: Optional[str] = None
    openDateTime: Optional[datetime.datetime] = None
    holdingPeriodDateTime: Optional[datetime.datetime] = None
    whenRealized: Optional[datetime.datetime] = None
    whenReopened: Optional[datetime.datetime] = None
    levelOfDetail: Optional[str] = None
    securityIDType: Optional[str] = None


@dataclass(frozen=True)
class InterestAccrualsCurrency(FlexElement):
    """`InterestAccrualsCurrency` Flex element; all fields optional."""
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fromDate: Optional[datetime.date] = None
    toDate: Optional[datetime.date] = None
    startingAccrualBalance: Optional[decimal.Decimal] = None
    interestAccrued: Optional[decimal.Decimal] = None
    accrualReversal: Optional[decimal.Decimal] = None
    endingAccrualBalance: Optional[decimal.Decimal] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    fxTranslation: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class TierInterestDetail(FlexElement):
    """`TierInterestDetail` Flex element; all fields optional."""
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    interestType: Optional[str] = None
    valueDate: Optional[datetime.date] = None
    tierBreak: Optional[str] = None
    balanceThreshold: Optional[decimal.Decimal] = None
    securitiesPrincipal: Optional[decimal.Decimal] = None
    commoditiesPrincipal: Optional[decimal.Decimal] = None
    ibuklPrincipal: Optional[decimal.Decimal] = None
    totalPrincipal: Optional[decimal.Decimal] = None
    rate: Optional[decimal.Decimal] = None
    securitiesInterest: Optional[decimal.Decimal] = None
    commoditiesInterest: Optional[decimal.Decimal] = None
    ibuklInterest: Optional[decimal.Decimal] = None
    totalInterest: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    fromAcct: Optional[str] = None
    toAcct: Optional[str] = None


@dataclass(frozen=True)
class HardToBorrowDetail(FlexElement):
    """`HardToBorrowDetail` Flex element; all fields optional."""
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    valueDate: Optional[datetime.date] = None
    quantity: Optional[decimal.Decimal] = None
    price: Optional[decimal.Decimal] = None
    value: Optional[decimal.Decimal] = None
    borrowFeeRate: Optional[decimal.Decimal] = None
    borrowFee: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    fromAcct: Optional[str] = None
    toAcct: Optional[str] = None


@dataclass(frozen=True)
class SLBActivity(FlexElement):
    """`SLBActivity` Flex element (stock loan/borrow); all fields optional."""
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    date: Optional[datetime.date] = None
    slbTransactionId: Optional[str] = None
    activityDescription: Optional[str] = None
    type: Optional[str] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    feeRate: Optional[decimal.Decimal] = None
    collateralAmount: Optional[decimal.Decimal] = None
    markQuantity: Optional[decimal.Decimal] = None
    markPriorPrice: Optional[decimal.Decimal] = None
    markCurrentPrice: Optional[decimal.Decimal] = None


# NOTE(review): unlike its siblings, SLBFee does not subclass FlexElement,
# and `fxRateToBase`/`assetCategory` are typed str — preserved as found;
# confirm both are intentional.
@dataclass(frozen=True)
class SLBFee:
    """`SLBFee` Flex element (stock loan/borrow fees); all fields optional."""
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[str] = None
    assetCategory: Optional[str] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    securityIDType: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    valueDate: Optional[datetime.date] = None
    startDate: Optional[datetime.date] = None
    type: Optional[str] = None
    exchange: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    collateralAmount: Optional[decimal.Decimal] = None
    feeRate: Optional[decimal.Decimal] = None
    fee: Optional[decimal.Decimal] = None
    carryCharge: Optional[decimal.Decimal] = None
    ticketCharge: Optional[decimal.Decimal] = None
    totalCharges: Optional[decimal.Decimal] = None
    marketFeeRate: Optional[decimal.Decimal] = None
    grossLendFee: Optional[decimal.Decimal] = None
    netLendFeeRate: Optional[decimal.Decimal] = None
    netLendFee: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    fromAcct: Optional[str] = None
    toAcct: Optional[str] = None


@dataclass(frozen=True)
class Transfer(FlexElement):
    """`Transfer` Flex element; all fields optional."""
    type: Optional[enums.TransferType] = None
    direction: Optional[enums.InOut] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    underlyingConid: Optional[str] = None
    date: Optional[datetime.date] = None
    dateTime: Optional[datetime.datetime] = None
    account: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    transferPrice: Optional[decimal.Decimal] = None
    positionAmount: Optional[decimal.Decimal] = None
    positionAmountInBase: Optional[decimal.Decimal] = None
    capitalGainsPnl: Optional[decimal.Decimal] = None
    cashTransfer: Optional[decimal.Decimal] = None
    code: Tuple[enums.Code, ...] = ()
    clientReference: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    sedol: Optional[str] = None
    securityIDType: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    company: Optional[str] = None
    accountName: Optional[str] = None
    pnlAmount: Optional[decimal.Decimal] = None
    pnlAmountInBase: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    transactionID: Optional[str] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None


@dataclass(frozen=True)
class UnsettledTransfer(FlexElement):
    """`UnsettledTransfer` Flex element; all fields optional."""
    direction: Optional[enums.ToFrom] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    sedol: Optional[str] = None
    underlyingConid: Optional[str] = None
    stage: Optional[str] = None
    tradeDate: Optional[datetime.date] = None
    targetSettlement: Optional[datetime.date] = None
    contra: Optional[str] = None
    quantity: Optional[decimal.Decimal] = None
    tradePrice: Optional[decimal.Decimal] = None
    tradeAmount: Optional[decimal.Decimal] = None
    tradeAmountInBase: Optional[decimal.Decimal] = None
    transactionID: Optional[str] = None


@dataclass(frozen=True)
class PriorPeriodPosition(FlexElement):
    """`PriorPeriodPosition` Flex element; all fields optional."""
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    priorMtmPnl: Optional[decimal.Decimal] = None
    date: Optional[datetime.date] = None
    price: Optional[decimal.Decimal] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    sedol: Optional[str] = None
    securityIDType: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None


@dataclass(frozen=True)
class CorporateAction(FlexElement):
    """`CorporateAction` Flex element; all fields optional."""
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    actionDescription: Optional[str] = None
    dateTime: Optional[datetime.datetime] = None
    amount: Optional[decimal.Decimal] = None
    quantity: Optional[decimal.Decimal] = None
    fifoPnlRealized: Optional[decimal.Decimal] = None
    capitalGainsPnl: Optional[decimal.Decimal] = None
    fxPnl: Optional[decimal.Decimal] = None
    mtmPnl: Optional[decimal.Decimal] = None
    type: Optional[enums.Reorg] = None
    code: Tuple[enums.Code, ...] = ()
    sedol: Optional[str] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    securityIDType: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    reportDate: Optional[datetime.date] = None
    proceeds: Optional[decimal.Decimal] = None
    value: Optional[decimal.Decimal] = None
    transactionID: Optional[str] = None


@dataclass(frozen=True)
class CashTransaction(FlexElement):
    """`CashTransaction` Flex element; all fields optional."""
    type: Optional[enums.CashAction] = None
    assetCategory: Optional[enums.AssetClass] = None
    accountId: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingConid: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    amount: Optional[decimal.Decimal] = None
    dateTime: Optional[datetime.datetime] = None
    sedol: Optional[str] = None
    symbol: Optional[str] = None
    securityIDType: Optional[str] = None
    underlyingSymbol: Optional[str] = None
    issuer: Optional[str] = None
    multiplier: Optional[decimal.Decimal] = None
    strike: Optional[decimal.Decimal] = None
    expiry: Optional[datetime.date] = None
    putCall: Optional[enums.PutCall] = None
    principalAdjustFactor: Optional[decimal.Decimal] = None
    tradeID: Optional[str] = None
    code: Tuple[enums.Code, ...] = ()
    transactionID: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    clientReference: Optional[str] = None
    settleDate: Optional[datetime.date] = None
    acctAlias: Optional[str] = None
    model: Optional[str] = None
    levelOfDetail: Optional[str] = None
    serialNumber: Optional[str] = None
    deliveryType: Optional[str] = None
    commodityType: Optional[str] = None
    fineness: Optional[decimal.Decimal] = None
    weight: Optional[str] = None


@dataclass(frozen=True)
class DebitCardActivity(FlexElement):
    """`DebitCardActivity` Flex element; all fields optional."""
    accountId: Optional[str] = None
    acctAlias: Optional[str] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    assetCategory: Optional[enums.AssetClass] = None
    status: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    postingDate: Optional[datetime.date] = None
    transactionDateTime: Optional[datetime.datetime] = None
    category: Optional[str] = None
    merchantNameLocation: Optional[str] = None
    amount: Optional[decimal.Decimal] = None
    model: Optional[str] = None


@dataclass(frozen=True)
class ChangeInDividendAccrual(FlexElement):
    """`ChangeInDividendAccrual` Flex element; all fields optional.

    NOTE(review): this definition was truncated at the chunk boundary mid-way
    through `netAmount` — that field is completed per the surrounding pattern;
    reconcile any further fields with the lines that follow this chunk.
    """
    date: Optional[datetime.date] = None
    assetCategory: Optional[enums.AssetClass] = None
    currency: Optional[str] = None
    fxRateToBase: Optional[decimal.Decimal] = None
    accountId: Optional[str] = None
    symbol: Optional[str] = None
    description: Optional[str] = None
    conid: Optional[str] = None
    securityID: Optional[str] = None
    cusip: Optional[str] = None
    isin: Optional[str] = None
    sedol: Optional[str] = None
    listingExchange: Optional[str] = None
    underlyingSecurityID: Optional[str] = None
    underlyingListingExchange: Optional[str] = None
    reportDate: Optional[datetime.date] = None
    underlyingConid: Optional[str] = None
    exDate: Optional[datetime.date] = None
    payDate: Optional[datetime.date] = None
    quantity: Optional[decimal.Decimal] = None
    tax: Optional[decimal.Decimal] = None
    fee: Optional[decimal.Decimal] = None
    grossRate: Optional[decimal.Decimal] = None
    grossAmount: Optional[decimal.Decimal] = None
    netAmount: Optional[decimal.Decimal] = None
Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] = () securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None fromAcct: Optional[str] = None toAcct: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None _ChangeInDividendAccrual = ChangeInDividendAccrual @dataclass(frozen=True) class OpenDividendAccrual(FlexElement): assetCategory: Optional[enums.AssetClass] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None accountId: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None exDate: Optional[datetime.date] = None payDate: Optional[datetime.date] = None quantity: Optional[decimal.Decimal] = None tax: Optional[decimal.Decimal] = None fee: Optional[decimal.Decimal] = None grossRate: Optional[decimal.Decimal] = None grossAmount: Optional[decimal.Decimal] = None netAmount: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] 
= () sedol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None fromAcct: Optional[str] = None toAcct: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: Optional[str] = None @dataclass(frozen=True) class SecurityInfo(FlexElement): assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingCategory: Optional[str] = None subCategory: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None maturity: Optional[str] = None issueDate: Optional[datetime.date] = None type: Optional[str] = None sedol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] 
= () currency: Optional[str] = None settlementPolicyMethod: Optional[str] = None @dataclass(frozen=True) class ConversionRate(FlexElement): reportDate: Optional[datetime.date] = None fromCurrency: Optional[str] = None toCurrency: Optional[str] = None rate: Optional[decimal.Decimal] = None @dataclass(frozen=True) class FIFOPerformanceSummaryUnderlying(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None listingExchange: Optional[str] = None assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None realizedSTProfit: Optional[decimal.Decimal] = None realizedSTLoss: Optional[decimal.Decimal] = None realizedLTProfit: Optional[decimal.Decimal] = None realizedLTLoss: Optional[decimal.Decimal] = None totalRealizedPnl: Optional[decimal.Decimal] = None unrealizedProfit: Optional[decimal.Decimal] = None unrealizedLoss: Optional[decimal.Decimal] = None totalUnrealizedPnl: Optional[decimal.Decimal] = None totalFifoPnl: Optional[decimal.Decimal] = None totalRealizedCapitalGainsPnl: Optional[decimal.Decimal] = None totalRealizedFxPnl: Optional[decimal.Decimal] = None totalUnrealizedCapitalGainsPnl: Optional[decimal.Decimal] = None totalUnrealizedFxPnl: Optional[decimal.Decimal] = None totalCapitalGainsPnl: Optional[decimal.Decimal] = None totalFxPnl: Optional[decimal.Decimal] = None transferredPnl: Optional[decimal.Decimal] = None transferredCapitalGainsPnl: Optional[decimal.Decimal] = None transferredFxPnl: Optional[decimal.Decimal] = None sedol: Optional[str] = None securityIDType: Optional[str] = None underlyingSymbol: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: 
Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None unrealizedSTProfit: Optional[decimal.Decimal] = None unrealizedSTLoss: Optional[decimal.Decimal] = None unrealizedLTProfit: Optional[decimal.Decimal] = None unrealizedLTLoss: Optional[decimal.Decimal] = None costAdj: Optional[decimal.Decimal] = None code: Tuple[enums.Code, ...] = () serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: Optional[str] = None @dataclass(frozen=True) class NetStockPosition(FlexElement): assetCategory: Optional[enums.AssetClass] = None accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None sedol: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None sharesAtIb: Optional[decimal.Decimal] = None sharesBorrowed: Optional[decimal.Decimal] = None sharesLent: Optional[decimal.Decimal] = None netShares: Optional[decimal.Decimal] = None serialNumber: Optional[str] = None deliveryType: Optional[str] = None commodityType: Optional[str] = None fineness: Optional[decimal.Decimal] = None weight: 
Optional[str] = None @dataclass(frozen=True) class ClientFee(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None feeType: Optional[str] = None date: Optional[datetime.datetime] = None description: Optional[str] = None expenseIndicator: Optional[str] = None revenue: Optional[decimal.Decimal] = None expense: Optional[decimal.Decimal] = None net: Optional[decimal.Decimal] = None revenueInBase: Optional[decimal.Decimal] = None expenseInBase: Optional[decimal.Decimal] = None netInBase: Optional[decimal.Decimal] = None tradeID: Optional[str] = None execID: Optional[str] = None levelOfDetail: Optional[str] = None @dataclass(frozen=True) class ClientFeesDetail(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None date: Optional[datetime.datetime] = None tradeID: Optional[str] = None execID: Optional[str] = None totalRevenue: Optional[decimal.Decimal] = None totalCommission: Optional[decimal.Decimal] = None brokerExecutionCharge: Optional[decimal.Decimal] = None clearingCharge: Optional[decimal.Decimal] = None thirdPartyExecutionCharge: Optional[decimal.Decimal] = None thirdPartyRegulatoryCharge: Optional[decimal.Decimal] = None regFINRATradingActivityFee: Optional[decimal.Decimal] = None regSection31TransactionFee: Optional[decimal.Decimal] = None regOther: Optional[decimal.Decimal] = None totalNet: Optional[decimal.Decimal] = None totalNetInBase: Optional[decimal.Decimal] = None levelOfDetail: Optional[str] = None other: Optional[decimal.Decimal] = None @dataclass(frozen=True) class TransactionTax(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None assetCategory: 
Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None date: Optional[datetime.datetime] = None taxDescription: Optional[str] = None quantity: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None taxAmount: Optional[decimal.Decimal] = None tradeId: Optional[str] = None tradePrice: Optional[decimal.Decimal] = None source: Optional[str] = None code: Tuple[enums.Code, ...] 
= () levelOfDetail: Optional[str] = None @dataclass(frozen=True) class TransactionTaxDetail(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None date: Optional[datetime.datetime] = None taxDescription: Optional[str] = None quantity: Optional[decimal.Decimal] = None reportDate: Optional[datetime.date] = None taxAmount: Optional[decimal.Decimal] = None tradeId: Optional[str] = None tradePrice: Optional[decimal.Decimal] = None source: Optional[str] = None code: Tuple[enums.Code, ...] 
= () levelOfDetail: Optional[str] = None @dataclass(frozen=True) class SalesTax(FlexElement): accountId: Optional[str] = None acctAlias: Optional[str] = None model: Optional[str] = None currency: Optional[str] = None fxRateToBase: Optional[decimal.Decimal] = None assetCategory: Optional[enums.AssetClass] = None symbol: Optional[str] = None description: Optional[str] = None conid: Optional[str] = None securityID: Optional[str] = None securityIDType: Optional[str] = None cusip: Optional[str] = None isin: Optional[str] = None listingExchange: Optional[str] = None underlyingConid: Optional[str] = None underlyingSecurityID: Optional[str] = None underlyingSymbol: Optional[str] = None underlyingListingExchange: Optional[str] = None issuer: Optional[str] = None multiplier: Optional[decimal.Decimal] = None strike: Optional[decimal.Decimal] = None expiry: Optional[datetime.date] = None putCall: Optional[enums.PutCall] = None principalAdjustFactor: Optional[decimal.Decimal] = None date: Optional[datetime.date] = None country: Optional[str] = None taxType: Optional[str] = None payer: Optional[str] = None taxableDescription: Optional[str] = None taxableAmount: Optional[decimal.Decimal] = None taxRate: Optional[decimal.Decimal] = None salesTax: Optional[decimal.Decimal] = None taxableTransactionID: Optional[str] = None transactionID: Optional[str] = None code: Tuple[enums.Code, ...] = () _ClientFeesDetail = ClientFeesDetail
true
true
f703fa3f53c5913ed04533abc3463b282d9d8fb7
3,343
py
Python
examples/oauth2_async.py
Nobyx/pyfy
e18a7b7e48eefc4cb58e5d826c341bce99452a66
[ "MIT" ]
48
2019-02-13T19:53:39.000Z
2021-05-04T20:56:34.000Z
examples/oauth2_async.py
Nobyx/pyfy
e18a7b7e48eefc4cb58e5d826c341bce99452a66
[ "MIT" ]
21
2019-01-09T17:46:13.000Z
2021-08-22T12:38:59.000Z
examples/oauth2_async.py
Nobyx/pyfy
e18a7b7e48eefc4cb58e5d826c341bce99452a66
[ "MIT" ]
15
2019-01-03T01:30:24.000Z
2022-01-30T09:53:18.000Z
import os import aiofiles import webbrowser import json as stdlib_json from sanic import Sanic, response from sanic.exceptions import abort from sanic.response import json from pyfy import AsyncSpotify, ClientCreds, AuthError try: from spt_keys import KEYS except: # noqa: E722 from spt_keys_template import KEYS app = Sanic(__name__) local_address = "localhost" local_port = "5000" local_full_address = local_address + ":" + str(local_port) spt = AsyncSpotify() client = ClientCreds() state = "123" @app.route("/authorize") def authorize(request): export_keys() client.load_from_env() spt.client_creds = client if spt.is_oauth_ready: return response.redirect(spt.auth_uri(state=state)) else: return ( json( { "error_description": "Client needs client_id, client_secret and a redirect uri in order to handle OAauth properly" } ), 500, ) @app.route("/callback/spotify") # You have to register this callback async def spotify_callback(request): if request.args.get("error"): return json(dict(error=request.args.get("error_description"))) elif request.args.get("code"): grant = request.args.get("code") callback_state = request.args.get("state") if callback_state != state: abort(401) try: user_creds = await spt.build_user_creds(grant=grant) async with aiofiles.open(os.getcwd() + "SPOTIFY_CREDS.json", "w") as file: await file.write(stdlib_json.dumps(user_creds.__dict__)) except AuthError as e: return json(dict(error_description=e.msg, error_code=e.code), e.code) else: await spt.populate_user_creds() print(os.getcwd()) return await response.file(os.getcwd() + "SPOTIFY_CREDS.json") # return response.json(dict(user_creds=user_creds.__dict__, check_if_active=app.url_for('is_active', _scheme='http', _external=True, _server=local_full_address)), 200) else: return response.text("Something is wrong with your callback") @app.route("/is_active") async def is_active(request): return json( dict( is_active=await spt.is_active, your_tracks=app.url_for( "tracks", _scheme="http", _external=True, 
_server=local_full_address ), your_playlists=app.url_for( "playlists", _scheme="http", _external=True, _server=local_full_address ), ) ) @app.route("/dump_creds") def dump_creds(request): # TODO: save both client and user creds and send to user as json files to downlaod return response.text("Not Implemented") @app.route("/") def index(request): return response.text("OK") @app.route("/tracks") async def tracks(request): return json(await spt.user_tracks()) @app.route("/playlists") async def playlists(request): return json(await spt.user_playlists()) def export_keys(): for k, v in KEYS.items(): if v: os.environ[k] = v print("export " + k + "=" + v) if __name__ == "__main__": webbrowser.open_new_tab("http://" + local_full_address + "/authorize") app.run(host=local_address, port=str(local_port), debug=True)
28.57265
179
0.643733
import os import aiofiles import webbrowser import json as stdlib_json from sanic import Sanic, response from sanic.exceptions import abort from sanic.response import json from pyfy import AsyncSpotify, ClientCreds, AuthError try: from spt_keys import KEYS except: from spt_keys_template import KEYS app = Sanic(__name__) local_address = "localhost" local_port = "5000" local_full_address = local_address + ":" + str(local_port) spt = AsyncSpotify() client = ClientCreds() state = "123" @app.route("/authorize") def authorize(request): export_keys() client.load_from_env() spt.client_creds = client if spt.is_oauth_ready: return response.redirect(spt.auth_uri(state=state)) else: return ( json( { "error_description": "Client needs client_id, client_secret and a redirect uri in order to handle OAauth properly" } ), 500, ) @app.route("/callback/spotify") async def spotify_callback(request): if request.args.get("error"): return json(dict(error=request.args.get("error_description"))) elif request.args.get("code"): grant = request.args.get("code") callback_state = request.args.get("state") if callback_state != state: abort(401) try: user_creds = await spt.build_user_creds(grant=grant) async with aiofiles.open(os.getcwd() + "SPOTIFY_CREDS.json", "w") as file: await file.write(stdlib_json.dumps(user_creds.__dict__)) except AuthError as e: return json(dict(error_description=e.msg, error_code=e.code), e.code) else: await spt.populate_user_creds() print(os.getcwd()) return await response.file(os.getcwd() + "SPOTIFY_CREDS.json") else: return response.text("Something is wrong with your callback") @app.route("/is_active") async def is_active(request): return json( dict( is_active=await spt.is_active, your_tracks=app.url_for( "tracks", _scheme="http", _external=True, _server=local_full_address ), your_playlists=app.url_for( "playlists", _scheme="http", _external=True, _server=local_full_address ), ) ) @app.route("/dump_creds") def dump_creds(request): return response.text("Not 
Implemented") @app.route("/") def index(request): return response.text("OK") @app.route("/tracks") async def tracks(request): return json(await spt.user_tracks()) @app.route("/playlists") async def playlists(request): return json(await spt.user_playlists()) def export_keys(): for k, v in KEYS.items(): if v: os.environ[k] = v print("export " + k + "=" + v) if __name__ == "__main__": webbrowser.open_new_tab("http://" + local_full_address + "/authorize") app.run(host=local_address, port=str(local_port), debug=True)
true
true
f703fa90e2d737ae58f0bbbe0f32941d8edab9d8
1,652
py
Python
firmware/adafruit-circuitpython-bundle-5.x-mpy-20200915/examples/ili9341_simpletest.py
freeglow/microcontroller-cpy
5adfda49da6eefaece81be2a2f26122d68736355
[ "MIT" ]
null
null
null
firmware/adafruit-circuitpython-bundle-5.x-mpy-20200915/examples/ili9341_simpletest.py
freeglow/microcontroller-cpy
5adfda49da6eefaece81be2a2f26122d68736355
[ "MIT" ]
null
null
null
firmware/adafruit-circuitpython-bundle-5.x-mpy-20200915/examples/ili9341_simpletest.py
freeglow/microcontroller-cpy
5adfda49da6eefaece81be2a2f26122d68736355
[ "MIT" ]
null
null
null
""" This test will initialize the display using displayio and draw a solid green background, a smaller purple rectangle, and some yellow text. All drawing is done using native displayio modules. Pinouts are for the 2.4" TFT FeatherWing or Breakout with a Feather M4 or M0. """ import board import terminalio import displayio from adafruit_display_text import label import adafruit_ili9341 # Release any resources currently in use for the displays displayio.release_displays() spi = board.SPI() tft_cs = board.D9 tft_dc = board.D10 display_bus = displayio.FourWire( spi, command=tft_dc, chip_select=tft_cs, reset=board.D6 ) display = adafruit_ili9341.ILI9341(display_bus, width=320, height=240) # Make the display context splash = displayio.Group(max_size=10) display.show(splash) # Draw a green background color_bitmap = displayio.Bitmap(320, 240, 1) color_palette = displayio.Palette(1) color_palette[0] = 0x00FF00 # Bright Green bg_sprite = displayio.TileGrid(color_bitmap, pixel_shader=color_palette, x=0, y=0) splash.append(bg_sprite) # Draw a smaller inner rectangle inner_bitmap = displayio.Bitmap(280, 200, 1) inner_palette = displayio.Palette(1) inner_palette[0] = 0xAA0088 # Purple inner_sprite = displayio.TileGrid(inner_bitmap, pixel_shader=inner_palette, x=20, y=20) splash.append(inner_sprite) # Draw a label text_group = displayio.Group(max_size=10, scale=3, x=57, y=120) text = "Hello World!" text_area = label.Label(terminalio.FONT, text=text, color=0xFFFF00) text_group.append(text_area) # Subgroup for text scaling splash.append(text_group) while True: pass
30.036364
88
0.757869
import board import terminalio import displayio from adafruit_display_text import label import adafruit_ili9341 displayio.release_displays() spi = board.SPI() tft_cs = board.D9 tft_dc = board.D10 display_bus = displayio.FourWire( spi, command=tft_dc, chip_select=tft_cs, reset=board.D6 ) display = adafruit_ili9341.ILI9341(display_bus, width=320, height=240) splash = displayio.Group(max_size=10) display.show(splash) color_bitmap = displayio.Bitmap(320, 240, 1) color_palette = displayio.Palette(1) color_palette[0] = 0x00FF00 bg_sprite = displayio.TileGrid(color_bitmap, pixel_shader=color_palette, x=0, y=0) splash.append(bg_sprite) inner_bitmap = displayio.Bitmap(280, 200, 1) inner_palette = displayio.Palette(1) inner_palette[0] = 0xAA0088 inner_sprite = displayio.TileGrid(inner_bitmap, pixel_shader=inner_palette, x=20, y=20) splash.append(inner_sprite) text_group = displayio.Group(max_size=10, scale=3, x=57, y=120) text = "Hello World!" text_area = label.Label(terminalio.FONT, text=text, color=0xFFFF00) text_group.append(text_area) splash.append(text_group) while True: pass
true
true
f703fb6b4a046fbf6ff6a8ee312a5201ae8c1525
14,119
py
Python
contrib/gitian-build.py
minblock/CBreezycoin
ea83e04d2e1eff4823f36234f98cae107210cc58
[ "MIT" ]
null
null
null
contrib/gitian-build.py
minblock/CBreezycoin
ea83e04d2e1eff4823f36234f98cae107210cc58
[ "MIT" ]
null
null
null
contrib/gitian-build.py
minblock/CBreezycoin
ea83e04d2e1eff4823f36234f98cae107210cc58
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import argparse import os import subprocess import sys def setup(): global args, workdir programs = ['ruby', 'git', 'apt-cacher-ng', 'make', 'wget'] if args.kvm: programs += ['python-vm-builder', 'qemu-kvm', 'qemu-utils'] elif args.docker: dockers = ['docker.io', 'docker-ce'] for i in dockers: return_code = subprocess.call(['sudo', 'apt-get', 'install', '-qq', i]) if return_code == 0: break if return_code != 0: print('Cannot find any way to install docker', file=sys.stderr) exit(1) else: programs += ['lxc', 'debootstrap'] subprocess.check_call(['sudo', 'apt-get', 'install', '-qq'] + programs) if not os.path.isdir('gitian.sigs.ltc'): subprocess.check_call(['git', 'clone', 'https://github.com/cbreezycoin-project/gitian.sigs.ltc.git']) if not os.path.isdir('cbreezycoin-detached-sigs'): subprocess.check_call(['git', 'clone', 'https://github.com/cbreezycoin-project/cbreezycoin-detached-sigs.git']) if not os.path.isdir('gitian-builder'): subprocess.check_call(['git', 'clone', 'https://github.com/devrandom/gitian-builder.git']) if not os.path.isdir('cbreezycoin'): subprocess.check_call(['git', 'clone', 'https://github.com/cbreezycoin-project/cbreezycoin.git']) os.chdir('gitian-builder') make_image_prog = ['bin/make-base-vm', '--suite', 'bionic', '--arch', 'amd64'] if args.docker: make_image_prog += ['--docker'] elif not args.kvm: make_image_prog += ['--lxc'] subprocess.check_call(make_image_prog) os.chdir(workdir) if args.is_bionic and not args.kvm and not args.docker: subprocess.check_call(['sudo', 'sed', '-i', 's/lxcbr0/br0/', '/etc/default/lxc-net']) print('Reboot is required') exit(0) def build(): global args, workdir os.makedirs('cbreezycoin-binaries/' + args.version, exist_ok=True) print('\nBuilding Dependencies\n') os.chdir('gitian-builder') os.makedirs('inputs', exist_ok=True) subprocess.check_call(['wget', '-N', '-P', 'inputs', 'https://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz']) 
subprocess.check_call(['wget', '-N', '-P', 'inputs', 'https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch']) subprocess.check_call(["echo 'a8c4e9cafba922f89de0df1f2152e7be286aba73f78505169bc351a7938dd911 inputs/osslsigncode-Backports-to-1.7.1.patch' | sha256sum -c"], shell=True) subprocess.check_call(["echo 'f9a8cdb38b9c309326764ebc937cba1523a3a751a7ab05df3ecc99d18ae466c9 inputs/osslsigncode-1.7.1.tar.gz' | sha256sum -c"], shell=True) subprocess.check_call(['make', '-C', '../cbreezycoin/depends', 'download', 'SOURCES_PATH=' + os.getcwd() + '/cache/common']) if args.linux: print('\nCompiling ' + args.version + ' Linux') subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'cbreezycoin='+args.commit, '--url', 'cbreezycoin='+args.url, '../cbreezycoin/contrib/gitian-descriptors/gitian-linux.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-linux', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-linux.yml']) subprocess.check_call('mv build/out/cbreezycoin-*.tar.gz build/out/src/cbreezycoin-*.tar.gz ../cbreezycoin-binaries/'+args.version, shell=True) if args.windows: print('\nCompiling ' + args.version + ' Windows') subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'cbreezycoin='+args.commit, '--url', 'cbreezycoin='+args.url, '../cbreezycoin/contrib/gitian-descriptors/gitian-win.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-unsigned', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-win.yml']) subprocess.check_call('mv build/out/cbreezycoin-*-win-unsigned.tar.gz inputs/', shell=True) subprocess.check_call('mv build/out/cbreezycoin-*.zip build/out/cbreezycoin-*.exe ../cbreezycoin-binaries/'+args.version, shell=True) if args.macos: print('\nCompiling ' + args.version 
+ ' MacOS') subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'cbreezycoin='+args.commit, '--url', 'cbreezycoin='+args.url, '../cbreezycoin/contrib/gitian-descriptors/gitian-osx.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-unsigned', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-osx.yml']) subprocess.check_call('mv build/out/cbreezycoin-*-osx-unsigned.tar.gz inputs/', shell=True) subprocess.check_call('mv build/out/cbreezycoin-*.tar.gz build/out/cbreezycoin-*.dmg ../cbreezycoin-binaries/'+args.version, shell=True) os.chdir(workdir) if args.commit_files: print('\nCommitting '+args.version+' Unsigned Sigs\n') os.chdir('gitian.sigs.ltc') subprocess.check_call(['git', 'add', args.version+'-linux/'+args.signer]) subprocess.check_call(['git', 'add', args.version+'-win-unsigned/'+args.signer]) subprocess.check_call(['git', 'add', args.version+'-osx-unsigned/'+args.signer]) subprocess.check_call(['git', 'commit', '-m', 'Add '+args.version+' unsigned sigs for '+args.signer]) os.chdir(workdir) def sign(): global args, workdir os.chdir('gitian-builder') if args.windows: print('\nSigning ' + args.version + ' Windows') subprocess.check_call('cp inputs/cbreezycoin-' + args.version + '-win-unsigned.tar.gz inputs/cbreezycoin-win-unsigned.tar.gz', shell=True) subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature='+args.commit, '../cbreezycoin/contrib/gitian-descriptors/gitian-win-signer.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-signed', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-win-signer.yml']) subprocess.check_call('mv build/out/cbreezycoin-*win64-setup.exe ../cbreezycoin-binaries/'+args.version, shell=True) subprocess.check_call('mv build/out/cbreezycoin-*win32-setup.exe 
../cbreezycoin-binaries/'+args.version, shell=True) if args.macos: print('\nSigning ' + args.version + ' MacOS') subprocess.check_call('cp inputs/cbreezycoin-' + args.version + '-osx-unsigned.tar.gz inputs/cbreezycoin-osx-unsigned.tar.gz', shell=True) subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature='+args.commit, '../cbreezycoin/contrib/gitian-descriptors/gitian-osx-signer.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-signed', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-osx-signer.yml']) subprocess.check_call('mv build/out/cbreezycoin-osx-signed.dmg ../cbreezycoin-binaries/'+args.version+'/cbreezycoin-'+args.version+'-osx.dmg', shell=True) os.chdir(workdir) if args.commit_files: print('\nCommitting '+args.version+' Signed Sigs\n') os.chdir('gitian.sigs.ltc') subprocess.check_call(['git', 'add', args.version+'-win-signed/'+args.signer]) subprocess.check_call(['git', 'add', args.version+'-osx-signed/'+args.signer]) subprocess.check_call(['git', 'commit', '-a', '-m', 'Add '+args.version+' signed binary sigs for '+args.signer]) os.chdir(workdir) def verify(): global args, workdir os.chdir('gitian-builder') print('\nVerifying v'+args.version+' Linux\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-linux', '../cbreezycoin/contrib/gitian-descriptors/gitian-linux.yml']) print('\nVerifying v'+args.version+' Windows\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-win-unsigned', '../cbreezycoin/contrib/gitian-descriptors/gitian-win.yml']) print('\nVerifying v'+args.version+' MacOS\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-osx-unsigned', '../cbreezycoin/contrib/gitian-descriptors/gitian-osx.yml']) print('\nVerifying v'+args.version+' Signed Windows\n') 
subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-win-signed', '../cbreezycoin/contrib/gitian-descriptors/gitian-win-signer.yml']) print('\nVerifying v'+args.version+' Signed MacOS\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-osx-signed', '../cbreezycoin/contrib/gitian-descriptors/gitian-osx-signer.yml']) os.chdir(workdir) def main(): global args, workdir parser = argparse.ArgumentParser(usage='%(prog)s [options] signer version') parser.add_argument('-c', '--commit', action='store_true', dest='commit', help='Indicate that the version argument is for a commit or branch') parser.add_argument('-p', '--pull', action='store_true', dest='pull', help='Indicate that the version argument is the number of a github repository pull request') parser.add_argument('-u', '--url', dest='url', default='https://github.com/cbreezycoin-project/cbreezycoin', help='Specify the URL of the repository. Default is %(default)s') parser.add_argument('-v', '--verify', action='store_true', dest='verify', help='Verify the Gitian build') parser.add_argument('-b', '--build', action='store_true', dest='build', help='Do a Gitian build') parser.add_argument('-s', '--sign', action='store_true', dest='sign', help='Make signed binaries for Windows and MacOS') parser.add_argument('-B', '--buildsign', action='store_true', dest='buildsign', help='Build both signed and unsigned binaries') parser.add_argument('-o', '--os', dest='os', default='lwm', help='Specify which Operating Systems the build is for. Default is %(default)s. l for Linux, w for Windows, m for MacOS') parser.add_argument('-j', '--jobs', dest='jobs', default='2', help='Number of processes to use. Default %(default)s') parser.add_argument('-m', '--memory', dest='memory', default='2000', help='Memory to allocate in MiB. 
Default %(default)s') parser.add_argument('-k', '--kvm', action='store_true', dest='kvm', help='Use KVM instead of LXC') parser.add_argument('-d', '--docker', action='store_true', dest='docker', help='Use Docker instead of LXC') parser.add_argument('-S', '--setup', action='store_true', dest='setup', help='Set up the Gitian building environment. Uses LXC. If you want to use KVM, use the --kvm option. Only works on Debian-based systems (Ubuntu, Debian)') parser.add_argument('-D', '--detach-sign', action='store_true', dest='detach_sign', help='Create the assert file for detached signing. Will not commit anything.') parser.add_argument('-n', '--no-commit', action='store_false', dest='commit_files', help='Do not commit anything to git') parser.add_argument('signer', help='GPG signer to sign each build assert file') parser.add_argument('version', help='Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified') args = parser.parse_args() workdir = os.getcwd() args.linux = 'l' in args.os args.windows = 'w' in args.os args.macos = 'm' in args.os args.is_bionic = b'bionic' in subprocess.check_output(['lsb_release', '-cs']) if args.buildsign: args.build=True args.sign=True if args.kvm and args.docker: raise Exception('Error: cannot have both kvm and docker') args.sign_prog = 'true' if args.detach_sign else 'gpg --detach-sign' # Set environment variable USE_LXC or USE_DOCKER, let gitian-builder know that we use lxc or docker if args.docker: os.environ['USE_DOCKER'] = '1' elif not args.kvm: os.environ['USE_LXC'] = '1' if not 'GITIAN_HOST_IP' in os.environ.keys(): os.environ['GITIAN_HOST_IP'] = '10.0.3.1' if not 'LXC_GUEST_IP' in os.environ.keys(): os.environ['LXC_GUEST_IP'] = '10.0.3.5' # Disable for MacOS if no SDK found if args.macos and not os.path.isfile('gitian-builder/inputs/MacOSX10.11.sdk.tar.gz'): print('Cannot build for MacOS, SDK does not exist. 
Will build for other OSes') args.macos = False script_name = os.path.basename(sys.argv[0]) # Signer and version shouldn't be empty if args.signer == '': print(script_name+': Missing signer.') print('Try '+script_name+' --help for more information') exit(1) if args.version == '': print(script_name+': Missing version.') print('Try '+script_name+' --help for more information') exit(1) # Add leading 'v' for tags if args.commit and args.pull: raise Exception('Cannot have both commit and pull') args.commit = ('' if args.commit else 'v') + args.version if args.setup: setup() os.chdir('cbreezycoin') if args.pull: subprocess.check_call(['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge']) os.chdir('../gitian-builder/inputs/cbreezycoin') subprocess.check_call(['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge']) args.commit = subprocess.check_output(['git', 'show', '-s', '--format=%H', 'FETCH_HEAD'], universal_newlines=True, encoding='utf8').strip() args.version = 'pull-' + args.version print(args.commit) subprocess.check_call(['git', 'fetch']) subprocess.check_call(['git', 'checkout', args.commit]) os.chdir(workdir) if args.build: build() if args.sign: sign() if args.verify: verify() if __name__ == '__main__': main()
60.337607
239
0.662157
import argparse import os import subprocess import sys def setup(): global args, workdir programs = ['ruby', 'git', 'apt-cacher-ng', 'make', 'wget'] if args.kvm: programs += ['python-vm-builder', 'qemu-kvm', 'qemu-utils'] elif args.docker: dockers = ['docker.io', 'docker-ce'] for i in dockers: return_code = subprocess.call(['sudo', 'apt-get', 'install', '-qq', i]) if return_code == 0: break if return_code != 0: print('Cannot find any way to install docker', file=sys.stderr) exit(1) else: programs += ['lxc', 'debootstrap'] subprocess.check_call(['sudo', 'apt-get', 'install', '-qq'] + programs) if not os.path.isdir('gitian.sigs.ltc'): subprocess.check_call(['git', 'clone', 'https://github.com/cbreezycoin-project/gitian.sigs.ltc.git']) if not os.path.isdir('cbreezycoin-detached-sigs'): subprocess.check_call(['git', 'clone', 'https://github.com/cbreezycoin-project/cbreezycoin-detached-sigs.git']) if not os.path.isdir('gitian-builder'): subprocess.check_call(['git', 'clone', 'https://github.com/devrandom/gitian-builder.git']) if not os.path.isdir('cbreezycoin'): subprocess.check_call(['git', 'clone', 'https://github.com/cbreezycoin-project/cbreezycoin.git']) os.chdir('gitian-builder') make_image_prog = ['bin/make-base-vm', '--suite', 'bionic', '--arch', 'amd64'] if args.docker: make_image_prog += ['--docker'] elif not args.kvm: make_image_prog += ['--lxc'] subprocess.check_call(make_image_prog) os.chdir(workdir) if args.is_bionic and not args.kvm and not args.docker: subprocess.check_call(['sudo', 'sed', '-i', 's/lxcbr0/br0/', '/etc/default/lxc-net']) print('Reboot is required') exit(0) def build(): global args, workdir os.makedirs('cbreezycoin-binaries/' + args.version, exist_ok=True) print('\nBuilding Dependencies\n') os.chdir('gitian-builder') os.makedirs('inputs', exist_ok=True) subprocess.check_call(['wget', '-N', '-P', 'inputs', 'https://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz']) subprocess.check_call(['wget', '-N', 
'-P', 'inputs', 'https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch']) subprocess.check_call(["echo 'a8c4e9cafba922f89de0df1f2152e7be286aba73f78505169bc351a7938dd911 inputs/osslsigncode-Backports-to-1.7.1.patch' | sha256sum -c"], shell=True) subprocess.check_call(["echo 'f9a8cdb38b9c309326764ebc937cba1523a3a751a7ab05df3ecc99d18ae466c9 inputs/osslsigncode-1.7.1.tar.gz' | sha256sum -c"], shell=True) subprocess.check_call(['make', '-C', '../cbreezycoin/depends', 'download', 'SOURCES_PATH=' + os.getcwd() + '/cache/common']) if args.linux: print('\nCompiling ' + args.version + ' Linux') subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'cbreezycoin='+args.commit, '--url', 'cbreezycoin='+args.url, '../cbreezycoin/contrib/gitian-descriptors/gitian-linux.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-linux', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-linux.yml']) subprocess.check_call('mv build/out/cbreezycoin-*.tar.gz build/out/src/cbreezycoin-*.tar.gz ../cbreezycoin-binaries/'+args.version, shell=True) if args.windows: print('\nCompiling ' + args.version + ' Windows') subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'cbreezycoin='+args.commit, '--url', 'cbreezycoin='+args.url, '../cbreezycoin/contrib/gitian-descriptors/gitian-win.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-unsigned', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-win.yml']) subprocess.check_call('mv build/out/cbreezycoin-*-win-unsigned.tar.gz inputs/', shell=True) subprocess.check_call('mv build/out/cbreezycoin-*.zip build/out/cbreezycoin-*.exe ../cbreezycoin-binaries/'+args.version, shell=True) if args.macos: print('\nCompiling ' + args.version + ' MacOS') 
subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'cbreezycoin='+args.commit, '--url', 'cbreezycoin='+args.url, '../cbreezycoin/contrib/gitian-descriptors/gitian-osx.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-unsigned', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-osx.yml']) subprocess.check_call('mv build/out/cbreezycoin-*-osx-unsigned.tar.gz inputs/', shell=True) subprocess.check_call('mv build/out/cbreezycoin-*.tar.gz build/out/cbreezycoin-*.dmg ../cbreezycoin-binaries/'+args.version, shell=True) os.chdir(workdir) if args.commit_files: print('\nCommitting '+args.version+' Unsigned Sigs\n') os.chdir('gitian.sigs.ltc') subprocess.check_call(['git', 'add', args.version+'-linux/'+args.signer]) subprocess.check_call(['git', 'add', args.version+'-win-unsigned/'+args.signer]) subprocess.check_call(['git', 'add', args.version+'-osx-unsigned/'+args.signer]) subprocess.check_call(['git', 'commit', '-m', 'Add '+args.version+' unsigned sigs for '+args.signer]) os.chdir(workdir) def sign(): global args, workdir os.chdir('gitian-builder') if args.windows: print('\nSigning ' + args.version + ' Windows') subprocess.check_call('cp inputs/cbreezycoin-' + args.version + '-win-unsigned.tar.gz inputs/cbreezycoin-win-unsigned.tar.gz', shell=True) subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature='+args.commit, '../cbreezycoin/contrib/gitian-descriptors/gitian-win-signer.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-signed', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-win-signer.yml']) subprocess.check_call('mv build/out/cbreezycoin-*win64-setup.exe ../cbreezycoin-binaries/'+args.version, shell=True) subprocess.check_call('mv build/out/cbreezycoin-*win32-setup.exe 
../cbreezycoin-binaries/'+args.version, shell=True) if args.macos: print('\nSigning ' + args.version + ' MacOS') subprocess.check_call('cp inputs/cbreezycoin-' + args.version + '-osx-unsigned.tar.gz inputs/cbreezycoin-osx-unsigned.tar.gz', shell=True) subprocess.check_call(['bin/gbuild', '-i', '--commit', 'signature='+args.commit, '../cbreezycoin/contrib/gitian-descriptors/gitian-osx-signer.yml']) subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-signed', '--destination', '../gitian.sigs.ltc/', '../cbreezycoin/contrib/gitian-descriptors/gitian-osx-signer.yml']) subprocess.check_call('mv build/out/cbreezycoin-osx-signed.dmg ../cbreezycoin-binaries/'+args.version+'/cbreezycoin-'+args.version+'-osx.dmg', shell=True) os.chdir(workdir) if args.commit_files: print('\nCommitting '+args.version+' Signed Sigs\n') os.chdir('gitian.sigs.ltc') subprocess.check_call(['git', 'add', args.version+'-win-signed/'+args.signer]) subprocess.check_call(['git', 'add', args.version+'-osx-signed/'+args.signer]) subprocess.check_call(['git', 'commit', '-a', '-m', 'Add '+args.version+' signed binary sigs for '+args.signer]) os.chdir(workdir) def verify(): global args, workdir os.chdir('gitian-builder') print('\nVerifying v'+args.version+' Linux\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-linux', '../cbreezycoin/contrib/gitian-descriptors/gitian-linux.yml']) print('\nVerifying v'+args.version+' Windows\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-win-unsigned', '../cbreezycoin/contrib/gitian-descriptors/gitian-win.yml']) print('\nVerifying v'+args.version+' MacOS\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-osx-unsigned', '../cbreezycoin/contrib/gitian-descriptors/gitian-osx.yml']) print('\nVerifying v'+args.version+' Signed Windows\n') 
subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-win-signed', '../cbreezycoin/contrib/gitian-descriptors/gitian-win-signer.yml']) print('\nVerifying v'+args.version+' Signed MacOS\n') subprocess.check_call(['bin/gverify', '-v', '-d', '../gitian.sigs.ltc/', '-r', args.version+'-osx-signed', '../cbreezycoin/contrib/gitian-descriptors/gitian-osx-signer.yml']) os.chdir(workdir) def main(): global args, workdir parser = argparse.ArgumentParser(usage='%(prog)s [options] signer version') parser.add_argument('-c', '--commit', action='store_true', dest='commit', help='Indicate that the version argument is for a commit or branch') parser.add_argument('-p', '--pull', action='store_true', dest='pull', help='Indicate that the version argument is the number of a github repository pull request') parser.add_argument('-u', '--url', dest='url', default='https://github.com/cbreezycoin-project/cbreezycoin', help='Specify the URL of the repository. Default is %(default)s') parser.add_argument('-v', '--verify', action='store_true', dest='verify', help='Verify the Gitian build') parser.add_argument('-b', '--build', action='store_true', dest='build', help='Do a Gitian build') parser.add_argument('-s', '--sign', action='store_true', dest='sign', help='Make signed binaries for Windows and MacOS') parser.add_argument('-B', '--buildsign', action='store_true', dest='buildsign', help='Build both signed and unsigned binaries') parser.add_argument('-o', '--os', dest='os', default='lwm', help='Specify which Operating Systems the build is for. Default is %(default)s. l for Linux, w for Windows, m for MacOS') parser.add_argument('-j', '--jobs', dest='jobs', default='2', help='Number of processes to use. Default %(default)s') parser.add_argument('-m', '--memory', dest='memory', default='2000', help='Memory to allocate in MiB. 
Default %(default)s') parser.add_argument('-k', '--kvm', action='store_true', dest='kvm', help='Use KVM instead of LXC') parser.add_argument('-d', '--docker', action='store_true', dest='docker', help='Use Docker instead of LXC') parser.add_argument('-S', '--setup', action='store_true', dest='setup', help='Set up the Gitian building environment. Uses LXC. If you want to use KVM, use the --kvm option. Only works on Debian-based systems (Ubuntu, Debian)') parser.add_argument('-D', '--detach-sign', action='store_true', dest='detach_sign', help='Create the assert file for detached signing. Will not commit anything.') parser.add_argument('-n', '--no-commit', action='store_false', dest='commit_files', help='Do not commit anything to git') parser.add_argument('signer', help='GPG signer to sign each build assert file') parser.add_argument('version', help='Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified') args = parser.parse_args() workdir = os.getcwd() args.linux = 'l' in args.os args.windows = 'w' in args.os args.macos = 'm' in args.os args.is_bionic = b'bionic' in subprocess.check_output(['lsb_release', '-cs']) if args.buildsign: args.build=True args.sign=True if args.kvm and args.docker: raise Exception('Error: cannot have both kvm and docker') args.sign_prog = 'true' if args.detach_sign else 'gpg --detach-sign' if args.docker: os.environ['USE_DOCKER'] = '1' elif not args.kvm: os.environ['USE_LXC'] = '1' if not 'GITIAN_HOST_IP' in os.environ.keys(): os.environ['GITIAN_HOST_IP'] = '10.0.3.1' if not 'LXC_GUEST_IP' in os.environ.keys(): os.environ['LXC_GUEST_IP'] = '10.0.3.5' if args.macos and not os.path.isfile('gitian-builder/inputs/MacOSX10.11.sdk.tar.gz'): print('Cannot build for MacOS, SDK does not exist. 
Will build for other OSes') args.macos = False script_name = os.path.basename(sys.argv[0]) if args.signer == '': print(script_name+': Missing signer.') print('Try '+script_name+' --help for more information') exit(1) if args.version == '': print(script_name+': Missing version.') print('Try '+script_name+' --help for more information') exit(1) # Add leading 'v' for tags if args.commit and args.pull: raise Exception('Cannot have both commit and pull') args.commit = ('' if args.commit else 'v') + args.version if args.setup: setup() os.chdir('cbreezycoin') if args.pull: subprocess.check_call(['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge']) os.chdir('../gitian-builder/inputs/cbreezycoin') subprocess.check_call(['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge']) args.commit = subprocess.check_output(['git', 'show', '-s', '--format=%H', 'FETCH_HEAD'], universal_newlines=True, encoding='utf8').strip() args.version = 'pull-' + args.version print(args.commit) subprocess.check_call(['git', 'fetch']) subprocess.check_call(['git', 'checkout', args.commit]) os.chdir(workdir) if args.build: build() if args.sign: sign() if args.verify: verify() if __name__ == '__main__': main()
true
true
f703fe3302fd8e97b2aba9b8d194ec5033b5a3f2
1,353
py
Python
autobahntestsuite/autobahntestsuite/case/case1_2_5.py
rishabh-bector/autobahn-testsuite
57030060630c10b22be44774973eaa61987b716c
[ "Apache-2.0" ]
595
2015-10-20T09:01:18.000Z
2022-03-28T08:48:27.000Z
autobahntestsuite/autobahntestsuite/case/case1_2_5.py
rishabh-bector/autobahn-testsuite
57030060630c10b22be44774973eaa61987b716c
[ "Apache-2.0" ]
73
2015-12-03T14:21:56.000Z
2022-02-05T01:53:05.000Z
autobahntestsuite/autobahntestsuite/case/case1_2_5.py
rishabh-bector/autobahn-testsuite
57030060630c10b22be44774973eaa61987b716c
[ "Apache-2.0" ]
65
2015-11-04T15:58:37.000Z
2022-02-09T03:49:24.000Z
############################################################################### ## ## Copyright (c) Crossbar.io Technologies GmbH ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. ## ############################################################################### from case import Case class Case1_2_5(Case): DESCRIPTION = """Send binary message message with payload of length 128.""" EXPECTATION = """Receive echo'ed binary message (with payload as sent). Clean close with normal code.""" def onOpen(self): payload = "\xfe" * 128 self.expected[Case.OK] = [("message", payload, True)] self.expectedClose = {"closedByMe":True,"closeCode":[self.p.CLOSE_STATUS_CODE_NORMAL],"requireClean":True} self.p.sendFrame(opcode = 2, payload = payload) self.p.killAfter(1)
41
113
0.606061
from case import Case class Case1_2_5(Case): DESCRIPTION = """Send binary message message with payload of length 128.""" EXPECTATION = """Receive echo'ed binary message (with payload as sent). Clean close with normal code.""" def onOpen(self): payload = "\xfe" * 128 self.expected[Case.OK] = [("message", payload, True)] self.expectedClose = {"closedByMe":True,"closeCode":[self.p.CLOSE_STATUS_CODE_NORMAL],"requireClean":True} self.p.sendFrame(opcode = 2, payload = payload) self.p.killAfter(1)
true
true
f703fe79f2870b477658e585c0a8cec88df6106d
101
py
Python
Simples/Multiplos.py
AlexDeSaran/Python
6c4ce2ad49fafa1d1d5e543d14e94a9e13463321
[ "MIT" ]
null
null
null
Simples/Multiplos.py
AlexDeSaran/Python
6c4ce2ad49fafa1d1d5e543d14e94a9e13463321
[ "MIT" ]
null
null
null
Simples/Multiplos.py
AlexDeSaran/Python
6c4ce2ad49fafa1d1d5e543d14e94a9e13463321
[ "MIT" ]
null
null
null
a = int(input()) for i in range(1,11): total = i*a print('{} x {} = {}'.format(i, a,total))
16.833333
44
0.485149
a = int(input()) for i in range(1,11): total = i*a print('{} x {} = {}'.format(i, a,total))
true
true
f7040147311911bc74b5f09fef490f0f55ddd7ee
8,220
py
Python
tests/DifferentialGame/masac_gnn_gaussian.py
maxiaoba/rlk
3e23473f6bbc59552b6b2bcd97245e024d7ca95d
[ "MIT" ]
1
2021-09-28T21:16:54.000Z
2021-09-28T21:16:54.000Z
tests/DifferentialGame/masac_gnn_gaussian.py
maxiaoba/rlkit
3e23473f6bbc59552b6b2bcd97245e024d7ca95d
[ "MIT" ]
null
null
null
tests/DifferentialGame/masac_gnn_gaussian.py
maxiaoba/rlkit
3e23473f6bbc59552b6b2bcd97245e024d7ca95d
[ "MIT" ]
null
null
null
import copy import torch.nn as nn from rlkit.launchers.launcher_util import setup_logger import rlkit.torch.pytorch_util as ptu from rlkit.core.ma_eval_util import get_generic_ma_path_information def experiment(variant): num_agent = variant['num_agent'] from differential_game import DifferentialGame expl_env = DifferentialGame(game_name=args.exp_name) eval_env = DifferentialGame(game_name=args.exp_name) obs_dim = eval_env.observation_space.low.size action_dim = eval_env.action_space.low.size from rlkit.torch.networks.graph_builders import FullGraphBuilder graph_builder1 = FullGraphBuilder( input_node_dim=obs_dim+action_dim, num_node=num_agent, contain_self_loop=False) from rlkit.torch.networks.gnn_networks import GNNNet gnn1 = GNNNet( graph_builder1, node_dim=variant['qf_kwargs']['hidden_dim'], conv_type=variant['qf_kwargs']['conv_type'], num_conv_layers=1, hidden_activation='relu', output_activation='relu', ) qf1 = nn.Sequential( gnn1, nn.Linear(variant['qf_kwargs']['hidden_dim'],1) ) target_qf1 = copy.deepcopy(qf1) from rlkit.torch.networks.graph_builders import FullGraphBuilder graph_builder2 = FullGraphBuilder( input_node_dim=obs_dim+action_dim, num_node=num_agent, contain_self_loop=False) from rlkit.torch.networks.gnn_networks import GNNNet gnn2 = GNNNet( graph_builder2, node_dim=variant['qf_kwargs']['hidden_dim'], conv_type=variant['qf_kwargs']['conv_type'], num_conv_layers=1, hidden_activation='relu', output_activation='relu', ) qf2 = nn.Sequential( gnn2, nn.Linear(variant['qf_kwargs']['hidden_dim'],1) ) target_qf2 = copy.deepcopy(qf2) policy_n, eval_policy_n, expl_policy_n = [], [], [] for i in range(num_agent): from rlkit.torch.networks.layers import SplitLayer policy = nn.Sequential( nn.Linear(obs_dim,variant['policy_kwargs']['hidden_dim']), nn.ReLU(), nn.Linear(variant['policy_kwargs']['hidden_dim'],variant['policy_kwargs']['hidden_dim']), nn.ReLU(), SplitLayer(layers=[nn.Linear(variant['policy_kwargs']['hidden_dim'],action_dim), 
nn.Linear(variant['policy_kwargs']['hidden_dim'],action_dim)]) ) from rlkit.torch.policies.tanh_gaussian_policy import TanhGaussianPolicy policy = TanhGaussianPolicy(module=policy) from rlkit.torch.policies.make_deterministic import MakeDeterministic eval_policy = MakeDeterministic(policy) from rlkit.exploration_strategies.base import PolicyWrappedWithExplorationStrategy if variant['random_exploration']: from rlkit.exploration_strategies.epsilon_greedy import EpsilonGreedy expl_policy = PolicyWrappedWithExplorationStrategy( exploration_strategy=EpsilonGreedy(expl_env.action_space, prob_random_action=1.0), policy=policy, ) else: expl_policy = policy policy_n.append(policy) eval_policy_n.append(eval_policy) expl_policy_n.append(expl_policy) from rlkit.samplers.data_collector.ma_path_collector import MAMdpPathCollector eval_path_collector = MAMdpPathCollector(eval_env, eval_policy_n) expl_path_collector = MAMdpPathCollector(expl_env, expl_policy_n) from rlkit.data_management.ma_env_replay_buffer import MAEnvReplayBuffer replay_buffer = MAEnvReplayBuffer(variant['replay_buffer_size'], expl_env, num_agent=num_agent) from rlkit.torch.masac.masac_gnn import MASACGNNTrainer trainer = MASACGNNTrainer( env = expl_env, qf1=qf1, target_qf1=target_qf1, qf2=qf2, target_qf2=target_qf2, policy_n=policy_n, **variant['trainer_kwargs'] ) from rlkit.torch.torch_rl_algorithm import TorchBatchRLAlgorithm algorithm = TorchBatchRLAlgorithm( trainer=trainer, exploration_env=expl_env, evaluation_env=eval_env, exploration_data_collector=expl_path_collector, evaluation_data_collector=eval_path_collector, replay_buffer=replay_buffer, log_path_function=get_generic_ma_path_information, **variant['algorithm_kwargs'] ) algorithm.to(ptu.device) algorithm.train() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument('--exp_name', type=str, default='zero_sum') parser.add_argument('--log_dir', type=str, default='MASACGNNGaussian') 
parser.add_argument('--conv', type=str, default='GSage') parser.add_argument('--hidden', type=int, default=16) parser.add_argument('--oa', action='store_true', default=False) # online action parser.add_argument('--snl', action='store_true', default=False) # sum n loss parser.add_argument('--re', action='store_true', default=False) # random exploration parser.add_argument('--alpha', type=float, default=None) # init alpha parser.add_argument('--fa', action='store_true', default=False) # fix alpha parser.add_argument('--lr', type=float, default=None) parser.add_argument('--bs', type=int, default=None) parser.add_argument('--epoch', type=int, default=None) parser.add_argument('--seed', type=int, default=0) parser.add_argument('--snapshot_mode', type=str, default="gap_and_last") parser.add_argument('--snapshot_gap', type=int, default=500) args = parser.parse_args() import os.path as osp pre_dir = './Data/'+args.exp_name main_dir = args.log_dir\ +args.conv\ +('hidden'+str(args.hidden))\ +('oa' if args.oa else '')\ +('snl' if args.snl else '')\ +('re' if args.re else '')\ +(('alpha'+str(args.alpha)) if args.alpha else '')\ +('fa' if args.fa else '')\ +(('lr'+str(args.lr)) if args.lr else '')\ +(('bs'+str(args.bs)) if args.bs else '') log_dir = osp.join(pre_dir,main_dir,'seed'+str(args.seed)) # noinspection PyTypeChecker variant = dict( num_agent=2, random_exploration=args.re, algorithm_kwargs=dict( num_epochs=(args.epoch if args.epoch else 100), num_eval_steps_per_epoch=100, num_trains_per_train_loop=100, num_expl_steps_per_train_loop=100, min_num_steps_before_training=100, max_path_length=100, batch_size=(args.bs if args.bs else 256), ), trainer_kwargs=dict( use_soft_update=True, tau=1e-2, discount=0.99, qf_learning_rate=(args.lr if args.lr else 1e-3), policy_learning_rate=(args.lr if args.lr else 1e-4), online_action=args.oa, sum_n_loss=args.snl, init_alpha=(args.alpha if args.alpha else 1.), use_automatic_entropy_tuning=(not args.fa), ), qf_kwargs=dict( 
conv_type=args.conv, hidden_dim=args.hidden, ), policy_kwargs=dict( hidden_dim=args.hidden, ), replay_buffer_size=int(1E6), ) import os if not os.path.isdir(log_dir): os.makedirs(log_dir) with open(osp.join(log_dir,'variant.json'),'w') as out_json: import json json.dump(variant,out_json,indent=2) import sys cmd_input = 'python ' + ' '.join(sys.argv) + '\n' with open(osp.join(log_dir, 'cmd_input.txt'), 'a') as f: f.write(cmd_input) setup_logger(args.exp_name+'/'+main_dir, variant=variant, snapshot_mode=args.snapshot_mode, snapshot_gap=args.snapshot_gap, log_dir=log_dir) import numpy as np import torch np.random.seed(args.seed) torch.manual_seed(args.seed) # ptu.set_gpu_mode(True) # optionally set the GPU (default=False) experiment(variant)
40.895522
101
0.647324
import copy import torch.nn as nn from rlkit.launchers.launcher_util import setup_logger import rlkit.torch.pytorch_util as ptu from rlkit.core.ma_eval_util import get_generic_ma_path_information def experiment(variant): num_agent = variant['num_agent'] from differential_game import DifferentialGame expl_env = DifferentialGame(game_name=args.exp_name) eval_env = DifferentialGame(game_name=args.exp_name) obs_dim = eval_env.observation_space.low.size action_dim = eval_env.action_space.low.size from rlkit.torch.networks.graph_builders import FullGraphBuilder graph_builder1 = FullGraphBuilder( input_node_dim=obs_dim+action_dim, num_node=num_agent, contain_self_loop=False) from rlkit.torch.networks.gnn_networks import GNNNet gnn1 = GNNNet( graph_builder1, node_dim=variant['qf_kwargs']['hidden_dim'], conv_type=variant['qf_kwargs']['conv_type'], num_conv_layers=1, hidden_activation='relu', output_activation='relu', ) qf1 = nn.Sequential( gnn1, nn.Linear(variant['qf_kwargs']['hidden_dim'],1) ) target_qf1 = copy.deepcopy(qf1) from rlkit.torch.networks.graph_builders import FullGraphBuilder graph_builder2 = FullGraphBuilder( input_node_dim=obs_dim+action_dim, num_node=num_agent, contain_self_loop=False) from rlkit.torch.networks.gnn_networks import GNNNet gnn2 = GNNNet( graph_builder2, node_dim=variant['qf_kwargs']['hidden_dim'], conv_type=variant['qf_kwargs']['conv_type'], num_conv_layers=1, hidden_activation='relu', output_activation='relu', ) qf2 = nn.Sequential( gnn2, nn.Linear(variant['qf_kwargs']['hidden_dim'],1) ) target_qf2 = copy.deepcopy(qf2) policy_n, eval_policy_n, expl_policy_n = [], [], [] for i in range(num_agent): from rlkit.torch.networks.layers import SplitLayer policy = nn.Sequential( nn.Linear(obs_dim,variant['policy_kwargs']['hidden_dim']), nn.ReLU(), nn.Linear(variant['policy_kwargs']['hidden_dim'],variant['policy_kwargs']['hidden_dim']), nn.ReLU(), SplitLayer(layers=[nn.Linear(variant['policy_kwargs']['hidden_dim'],action_dim), 
nn.Linear(variant['policy_kwargs']['hidden_dim'],action_dim)]) ) from rlkit.torch.policies.tanh_gaussian_policy import TanhGaussianPolicy policy = TanhGaussianPolicy(module=policy) from rlkit.torch.policies.make_deterministic import MakeDeterministic eval_policy = MakeDeterministic(policy) from rlkit.exploration_strategies.base import PolicyWrappedWithExplorationStrategy if variant['random_exploration']: from rlkit.exploration_strategies.epsilon_greedy import EpsilonGreedy expl_policy = PolicyWrappedWithExplorationStrategy( exploration_strategy=EpsilonGreedy(expl_env.action_space, prob_random_action=1.0), policy=policy, ) else: expl_policy = policy policy_n.append(policy) eval_policy_n.append(eval_policy) expl_policy_n.append(expl_policy) from rlkit.samplers.data_collector.ma_path_collector import MAMdpPathCollector eval_path_collector = MAMdpPathCollector(eval_env, eval_policy_n) expl_path_collector = MAMdpPathCollector(expl_env, expl_policy_n) from rlkit.data_management.ma_env_replay_buffer import MAEnvReplayBuffer replay_buffer = MAEnvReplayBuffer(variant['replay_buffer_size'], expl_env, num_agent=num_agent) from rlkit.torch.masac.masac_gnn import MASACGNNTrainer trainer = MASACGNNTrainer( env = expl_env, qf1=qf1, target_qf1=target_qf1, qf2=qf2, target_qf2=target_qf2, policy_n=policy_n, **variant['trainer_kwargs'] ) from rlkit.torch.torch_rl_algorithm import TorchBatchRLAlgorithm algorithm = TorchBatchRLAlgorithm( trainer=trainer, exploration_env=expl_env, evaluation_env=eval_env, exploration_data_collector=expl_path_collector, evaluation_data_collector=eval_path_collector, replay_buffer=replay_buffer, log_path_function=get_generic_ma_path_information, **variant['algorithm_kwargs'] ) algorithm.to(ptu.device) algorithm.train() if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument('--exp_name', type=str, default='zero_sum') parser.add_argument('--log_dir', type=str, default='MASACGNNGaussian') 
parser.add_argument('--conv', type=str, default='GSage') parser.add_argument('--hidden', type=int, default=16) parser.add_argument('--oa', action='store_true', default=False) parser.add_argument('--snl', action='store_true', default=False) parser.add_argument('--re', action='store_true', default=False) parser.add_argument('--alpha', type=float, default=None) parser.add_argument('--fa', action='store_true', default=False) parser.add_argument('--lr', type=float, default=None) parser.add_argument('--bs', type=int, default=None) parser.add_argument('--epoch', type=int, default=None) parser.add_argument('--seed', type=int, default=0) parser.add_argument('--snapshot_mode', type=str, default="gap_and_last") parser.add_argument('--snapshot_gap', type=int, default=500) args = parser.parse_args() import os.path as osp pre_dir = './Data/'+args.exp_name main_dir = args.log_dir\ +args.conv\ +('hidden'+str(args.hidden))\ +('oa' if args.oa else '')\ +('snl' if args.snl else '')\ +('re' if args.re else '')\ +(('alpha'+str(args.alpha)) if args.alpha else '')\ +('fa' if args.fa else '')\ +(('lr'+str(args.lr)) if args.lr else '')\ +(('bs'+str(args.bs)) if args.bs else '') log_dir = osp.join(pre_dir,main_dir,'seed'+str(args.seed)) variant = dict( num_agent=2, random_exploration=args.re, algorithm_kwargs=dict( num_epochs=(args.epoch if args.epoch else 100), num_eval_steps_per_epoch=100, num_trains_per_train_loop=100, num_expl_steps_per_train_loop=100, min_num_steps_before_training=100, max_path_length=100, batch_size=(args.bs if args.bs else 256), ), trainer_kwargs=dict( use_soft_update=True, tau=1e-2, discount=0.99, qf_learning_rate=(args.lr if args.lr else 1e-3), policy_learning_rate=(args.lr if args.lr else 1e-4), online_action=args.oa, sum_n_loss=args.snl, init_alpha=(args.alpha if args.alpha else 1.), use_automatic_entropy_tuning=(not args.fa), ), qf_kwargs=dict( conv_type=args.conv, hidden_dim=args.hidden, ), policy_kwargs=dict( hidden_dim=args.hidden, ), 
replay_buffer_size=int(1E6), ) import os if not os.path.isdir(log_dir): os.makedirs(log_dir) with open(osp.join(log_dir,'variant.json'),'w') as out_json: import json json.dump(variant,out_json,indent=2) import sys cmd_input = 'python ' + ' '.join(sys.argv) + '\n' with open(osp.join(log_dir, 'cmd_input.txt'), 'a') as f: f.write(cmd_input) setup_logger(args.exp_name+'/'+main_dir, variant=variant, snapshot_mode=args.snapshot_mode, snapshot_gap=args.snapshot_gap, log_dir=log_dir) import numpy as np import torch np.random.seed(args.seed) torch.manual_seed(args.seed) experiment(variant)
true
true
f70402c5d37a8546f11db5fc5373c0510be77024
6,232
py
Python
corehq/apps/reports/v2/formatters/cases.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
1
2020-07-14T13:00:23.000Z
2020-07-14T13:00:23.000Z
corehq/apps/reports/v2/formatters/cases.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
94
2020-12-11T06:57:31.000Z
2022-03-15T10:24:06.000Z
corehq/apps/reports/v2/formatters/cases.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
null
null
null
from django.urls import NoReverseMatch from django.utils import html from django.utils.translation import ugettext as _ from couchdbkit import ResourceNotFound from casexml.apps.case.models import CommCareCaseAction from corehq.apps.case_search.const import ( CASE_COMPUTED_METADATA, SPECIAL_CASE_PROPERTIES, SPECIAL_CASE_PROPERTIES_MAP, ) from corehq.apps.es.case_search import flatten_result from corehq.apps.groups.models import Group from corehq.apps.locations.models import SQLLocation from corehq.apps.reports.v2.models import BaseDataFormatter from corehq.apps.reports.v2.utils import report_date_to_json from corehq.apps.users.models import CouchUser from corehq.util.quickcache import quickcache from corehq.util.timezones.utils import parse_date from corehq.util.view_utils import absolute_reverse class CaseDataFormatter(BaseDataFormatter): def __init__(self, request, domain, raw_data): super(CaseDataFormatter, self).__init__(request, domain, raw_data) self.raw_data = flatten_result(raw_data) @property def owner_id(self): """Special Case Property @owner_id""" if 'owner_id' in self.raw_data: return self.raw_data.get('owner_id') elif 'user_id' in self.raw_data: return self.raw_data.gert('user_id') else: return '' @property def date_opened(self): """Special Case Property date_opened""" return self._fmt_dateprop('opened_on', False) @property def last_modified(self): """Special Case Property last_modified""" return self._fmt_dateprop('modified_on', False) @property def closed_by_username(self): """Computed metadata""" return self._get_username(self.closed_by_user_id) @property def last_modified_by_user_username(self): """Computed metadata""" return self._get_username(self.raw_data.get('user_id')) @property def opened_by_username(self): """Computed metadata""" user = self._creating_user if user is None: return _("No Data") return user['name'] or self._user_not_found_display(user['id']) @property def owner_name(self): """Computed metadata""" owner_type, owner = self._owner 
if owner_type == 'group': return '<span class="label label-default">%s</span>' % owner['name'] return owner['name'] @property def closed_by_user_id(self): """Computed metadata""" return self.raw_data.get('closed_by') @property def opened_by_user_id(self): """Computed metadata""" user = self._creating_user if user is None: return _("No data") return user['id'] @property def server_last_modified_date(self): """Computed metadata""" return self._fmt_dateprop('server_modified_on', False) def get_context(self): context = {} context.update(self.raw_data) context.update(self._case_info_context) context['_link'] = self._link return context @property def _link(self): try: return absolute_reverse( 'case_data', args=[self.domain, self.raw_data.get('_id')] ) except NoReverseMatch: return None @property def _case_info_context(self): context = {} for prop in SPECIAL_CASE_PROPERTIES + CASE_COMPUTED_METADATA: context[prop] = self._get_case_info_prop(prop) return context def _get_case_info_prop(self, prop): fmt_prop = prop.replace('@', '') if hasattr(self, fmt_prop): return getattr(self, fmt_prop) elif prop in SPECIAL_CASE_PROPERTIES: return self._get_special_property(prop) raise NotImplementedError( "CaseDataFormatter.{} not found".format(prop)) def _get_special_property(self, prop): return (SPECIAL_CASE_PROPERTIES_MAP[prop] .value_getter(self.raw_data)) def _fmt_dateprop(self, prop, iso=True): val = report_date_to_json( self.request, self.domain, parse_date(self.raw_data[prop]) ) if iso: val = 'T'.join(val.split(' ')) if val else None return val @property @quickcache(['self.owner_id']) def _owning_group(self): try: return Group.get(self.owner_id) except ResourceNotFound: return None @property @quickcache(['self.owner_id']) def _location(self): return SQLLocation.objects.get_or_None(location_id=self.owner_id) @property @quickcache(['self.owner_id']) def _owner(self): if self._owning_group and self._owning_group.name: return ('group', {'id': self._owning_group._id, 'name': 
self._owning_group.name}) elif self._location: return ('location', {'id': self._location.location_id, 'name': self._location.display_name}) return ('user', self._user_meta(self.owner_id)) @property def _creating_user(self): try: creator_id = self.raw_data['opened_by'] except KeyError: creator_id = None if 'actions' in self.raw_data: for action in self.raw_data['actions']: if action['action_type'] == 'create': action_doc = CommCareCaseAction.wrap(action) creator_id = action_doc.get_user_id() break if not creator_id: return None return self._user_meta(creator_id) def _user_meta(self, user_id): return {'id': user_id, 'name': self._get_username(user_id)} def _user_not_found_display(self, user_id): return _("Unknown [%s]") % user_id @quickcache(['user_id']) def _get_username(self, user_id): if not user_id: return None try: user = CouchUser.get_by_user_id(user_id) if user: return user.username except CouchUser.AccountTypeError: return None
31.316583
80
0.627246
from django.urls import NoReverseMatch from django.utils import html from django.utils.translation import ugettext as _ from couchdbkit import ResourceNotFound from casexml.apps.case.models import CommCareCaseAction from corehq.apps.case_search.const import ( CASE_COMPUTED_METADATA, SPECIAL_CASE_PROPERTIES, SPECIAL_CASE_PROPERTIES_MAP, ) from corehq.apps.es.case_search import flatten_result from corehq.apps.groups.models import Group from corehq.apps.locations.models import SQLLocation from corehq.apps.reports.v2.models import BaseDataFormatter from corehq.apps.reports.v2.utils import report_date_to_json from corehq.apps.users.models import CouchUser from corehq.util.quickcache import quickcache from corehq.util.timezones.utils import parse_date from corehq.util.view_utils import absolute_reverse class CaseDataFormatter(BaseDataFormatter): def __init__(self, request, domain, raw_data): super(CaseDataFormatter, self).__init__(request, domain, raw_data) self.raw_data = flatten_result(raw_data) @property def owner_id(self): if 'owner_id' in self.raw_data: return self.raw_data.get('owner_id') elif 'user_id' in self.raw_data: return self.raw_data.gert('user_id') else: return '' @property def date_opened(self): return self._fmt_dateprop('opened_on', False) @property def last_modified(self): return self._fmt_dateprop('modified_on', False) @property def closed_by_username(self): return self._get_username(self.closed_by_user_id) @property def last_modified_by_user_username(self): return self._get_username(self.raw_data.get('user_id')) @property def opened_by_username(self): user = self._creating_user if user is None: return _("No Data") return user['name'] or self._user_not_found_display(user['id']) @property def owner_name(self): owner_type, owner = self._owner if owner_type == 'group': return '<span class="label label-default">%s</span>' % owner['name'] return owner['name'] @property def closed_by_user_id(self): return self.raw_data.get('closed_by') @property def 
opened_by_user_id(self): user = self._creating_user if user is None: return _("No data") return user['id'] @property def server_last_modified_date(self): return self._fmt_dateprop('server_modified_on', False) def get_context(self): context = {} context.update(self.raw_data) context.update(self._case_info_context) context['_link'] = self._link return context @property def _link(self): try: return absolute_reverse( 'case_data', args=[self.domain, self.raw_data.get('_id')] ) except NoReverseMatch: return None @property def _case_info_context(self): context = {} for prop in SPECIAL_CASE_PROPERTIES + CASE_COMPUTED_METADATA: context[prop] = self._get_case_info_prop(prop) return context def _get_case_info_prop(self, prop): fmt_prop = prop.replace('@', '') if hasattr(self, fmt_prop): return getattr(self, fmt_prop) elif prop in SPECIAL_CASE_PROPERTIES: return self._get_special_property(prop) raise NotImplementedError( "CaseDataFormatter.{} not found".format(prop)) def _get_special_property(self, prop): return (SPECIAL_CASE_PROPERTIES_MAP[prop] .value_getter(self.raw_data)) def _fmt_dateprop(self, prop, iso=True): val = report_date_to_json( self.request, self.domain, parse_date(self.raw_data[prop]) ) if iso: val = 'T'.join(val.split(' ')) if val else None return val @property @quickcache(['self.owner_id']) def _owning_group(self): try: return Group.get(self.owner_id) except ResourceNotFound: return None @property @quickcache(['self.owner_id']) def _location(self): return SQLLocation.objects.get_or_None(location_id=self.owner_id) @property @quickcache(['self.owner_id']) def _owner(self): if self._owning_group and self._owning_group.name: return ('group', {'id': self._owning_group._id, 'name': self._owning_group.name}) elif self._location: return ('location', {'id': self._location.location_id, 'name': self._location.display_name}) return ('user', self._user_meta(self.owner_id)) @property def _creating_user(self): try: creator_id = self.raw_data['opened_by'] except KeyError: 
creator_id = None if 'actions' in self.raw_data: for action in self.raw_data['actions']: if action['action_type'] == 'create': action_doc = CommCareCaseAction.wrap(action) creator_id = action_doc.get_user_id() break if not creator_id: return None return self._user_meta(creator_id) def _user_meta(self, user_id): return {'id': user_id, 'name': self._get_username(user_id)} def _user_not_found_display(self, user_id): return _("Unknown [%s]") % user_id @quickcache(['user_id']) def _get_username(self, user_id): if not user_id: return None try: user = CouchUser.get_by_user_id(user_id) if user: return user.username except CouchUser.AccountTypeError: return None
true
true
f704031ca2654ce12a768055b21aa14bfcf015c4
4,595
py
Python
megaman/geometry/tests/test_adjacency.py
jrsassen/megaman
6583e462bc05c003c6c5e030ba993c5e30477720
[ "BSD-2-Clause" ]
303
2016-03-03T00:44:37.000Z
2022-03-14T03:43:38.000Z
megaman/geometry/tests/test_adjacency.py
YifuLiuL/megaman
faccaf267aad0a8b18ec8a705735fd9dd838ca1e
[ "BSD-2-Clause" ]
52
2016-02-26T21:41:31.000Z
2021-06-27T08:33:51.000Z
megaman/geometry/tests/test_adjacency.py
YifuLiuL/megaman
faccaf267aad0a8b18ec8a705735fd9dd838ca1e
[ "BSD-2-Clause" ]
67
2016-03-03T22:38:35.000Z
2022-01-12T08:03:47.000Z
# LICENSE: Simplified BSD https://github.com/mmp2/megaman/blob/master/LICENSE from nose import SkipTest import numpy as np from numpy.testing import assert_allclose, assert_raises, assert_equal from scipy.sparse import isspmatrix from scipy.spatial.distance import cdist, pdist, squareform from megaman.geometry import (Geometry, compute_adjacency_matrix, Adjacency, adjacency_methods) try: import pyflann as pyf NO_PYFLANN = False except ImportError: NO_PYFLANN = True def test_adjacency_methods(): assert_equal(set(adjacency_methods()), {'auto', 'pyflann', 'ball_tree', 'cyflann', 'brute', 'kd_tree'}) def test_adjacency_input_validation(): X = np.random.rand(20, 3) # need to specify radius or n_neighbors assert_raises(ValueError, compute_adjacency_matrix, X) # cannot specify both radius and n_neighbors assert_raises(ValueError, compute_adjacency_matrix, X, radius=1, n_neighbors=10) def test_adjacency(): rng = np.random.RandomState(36) X = rng.rand(100, 3) Gtrue = {} exact_methods = [m for m in Adjacency.methods() if not m.endswith('flann')] def check_kneighbors(n_neighbors, method): if method == 'pyflann' and NO_PYFLANN: raise SkipTest("pyflann not installed") G = compute_adjacency_matrix(X, method=method, n_neighbors=n_neighbors) assert isspmatrix(G) assert G.shape == (X.shape[0], X.shape[0]) if method in exact_methods: assert_allclose(G.toarray(), Gtrue[n_neighbors].toarray()) def check_radius(radius, method): if method == 'pyflann' and NO_PYFLANN: raise SkipTest("pyflann not installed") G = compute_adjacency_matrix(X, method=method, radius=radius) assert isspmatrix(G) assert G.shape == (X.shape[0], X.shape[0]) if method in exact_methods: assert_allclose(G.toarray(), Gtrue[radius].toarray()) for n_neighbors in [5, 10, 15]: Gtrue[n_neighbors] = compute_adjacency_matrix(X, method='brute', n_neighbors=n_neighbors) for method in Adjacency.methods(): yield check_kneighbors, n_neighbors, method for radius in [0.1, 0.5, 1.0]: Gtrue[radius] = compute_adjacency_matrix(X, 
method='brute', radius=radius) for method in Adjacency.methods(): yield check_radius, radius, method def test_unknown_method(): X = np.arange(20).reshape((10, 2)) assert_raises(ValueError, compute_adjacency_matrix, X, 'foo') def test_all_methods_close(): rand = np.random.RandomState(36) X = rand.randn(10, 2) D_true = squareform(pdist(X)) D_true[D_true > 0.5] = 0 def check_method(method): kwargs = {} if method == 'pyflann': try: import pyflann as pyf except ImportError: raise SkipTest("pyflann not installed.") flindex = pyf.FLANN() flindex.build_index(X, algorithm='kmeans', target_precision=0.9) kwargs['flann_index'] = flindex this_D = compute_adjacency_matrix(X, method=method, radius=0.5, **kwargs) assert_allclose(this_D.toarray(), D_true, rtol=1E-5) for method in ['auto', 'cyflann', 'pyflann', 'brute']: yield check_method, method def test_custom_adjacency(): class CustomAdjacency(Adjacency): name = "custom" def adjacency_graph(self, X): return squareform(pdist(X)) rand = np.random.RandomState(42) X = rand.rand(10, 2) D = compute_adjacency_matrix(X, method='custom', radius=1) assert_allclose(D, cdist(X, X)) Adjacency._remove_from_registry("custom") def test_cyflann_index_type(): rand = np.random.RandomState(36) X = rand.randn(10, 2) D_true = squareform(pdist(X)) D_true[D_true > 1.5] = 0 def check_index_type(index_type): method = 'cyflann' radius = 1.5 cyflann_kwds = {'index_type':index_type} adjacency_kwds = {'radius':radius, 'cyflann_kwds':cyflann_kwds} this_D = compute_adjacency_matrix(X=X, method = 'cyflann', **adjacency_kwds) assert_allclose(this_D.toarray(), D_true, rtol=1E-5, atol=1E-5) for index_type in ['kmeans', 'kdtrees']: yield check_index_type, index_type
33.540146
84
0.618063
from nose import SkipTest import numpy as np from numpy.testing import assert_allclose, assert_raises, assert_equal from scipy.sparse import isspmatrix from scipy.spatial.distance import cdist, pdist, squareform from megaman.geometry import (Geometry, compute_adjacency_matrix, Adjacency, adjacency_methods) try: import pyflann as pyf NO_PYFLANN = False except ImportError: NO_PYFLANN = True def test_adjacency_methods(): assert_equal(set(adjacency_methods()), {'auto', 'pyflann', 'ball_tree', 'cyflann', 'brute', 'kd_tree'}) def test_adjacency_input_validation(): X = np.random.rand(20, 3) assert_raises(ValueError, compute_adjacency_matrix, X) assert_raises(ValueError, compute_adjacency_matrix, X, radius=1, n_neighbors=10) def test_adjacency(): rng = np.random.RandomState(36) X = rng.rand(100, 3) Gtrue = {} exact_methods = [m for m in Adjacency.methods() if not m.endswith('flann')] def check_kneighbors(n_neighbors, method): if method == 'pyflann' and NO_PYFLANN: raise SkipTest("pyflann not installed") G = compute_adjacency_matrix(X, method=method, n_neighbors=n_neighbors) assert isspmatrix(G) assert G.shape == (X.shape[0], X.shape[0]) if method in exact_methods: assert_allclose(G.toarray(), Gtrue[n_neighbors].toarray()) def check_radius(radius, method): if method == 'pyflann' and NO_PYFLANN: raise SkipTest("pyflann not installed") G = compute_adjacency_matrix(X, method=method, radius=radius) assert isspmatrix(G) assert G.shape == (X.shape[0], X.shape[0]) if method in exact_methods: assert_allclose(G.toarray(), Gtrue[radius].toarray()) for n_neighbors in [5, 10, 15]: Gtrue[n_neighbors] = compute_adjacency_matrix(X, method='brute', n_neighbors=n_neighbors) for method in Adjacency.methods(): yield check_kneighbors, n_neighbors, method for radius in [0.1, 0.5, 1.0]: Gtrue[radius] = compute_adjacency_matrix(X, method='brute', radius=radius) for method in Adjacency.methods(): yield check_radius, radius, method def test_unknown_method(): X = np.arange(20).reshape((10, 2)) 
assert_raises(ValueError, compute_adjacency_matrix, X, 'foo') def test_all_methods_close(): rand = np.random.RandomState(36) X = rand.randn(10, 2) D_true = squareform(pdist(X)) D_true[D_true > 0.5] = 0 def check_method(method): kwargs = {} if method == 'pyflann': try: import pyflann as pyf except ImportError: raise SkipTest("pyflann not installed.") flindex = pyf.FLANN() flindex.build_index(X, algorithm='kmeans', target_precision=0.9) kwargs['flann_index'] = flindex this_D = compute_adjacency_matrix(X, method=method, radius=0.5, **kwargs) assert_allclose(this_D.toarray(), D_true, rtol=1E-5) for method in ['auto', 'cyflann', 'pyflann', 'brute']: yield check_method, method def test_custom_adjacency(): class CustomAdjacency(Adjacency): name = "custom" def adjacency_graph(self, X): return squareform(pdist(X)) rand = np.random.RandomState(42) X = rand.rand(10, 2) D = compute_adjacency_matrix(X, method='custom', radius=1) assert_allclose(D, cdist(X, X)) Adjacency._remove_from_registry("custom") def test_cyflann_index_type(): rand = np.random.RandomState(36) X = rand.randn(10, 2) D_true = squareform(pdist(X)) D_true[D_true > 1.5] = 0 def check_index_type(index_type): method = 'cyflann' radius = 1.5 cyflann_kwds = {'index_type':index_type} adjacency_kwds = {'radius':radius, 'cyflann_kwds':cyflann_kwds} this_D = compute_adjacency_matrix(X=X, method = 'cyflann', **adjacency_kwds) assert_allclose(this_D.toarray(), D_true, rtol=1E-5, atol=1E-5) for index_type in ['kmeans', 'kdtrees']: yield check_index_type, index_type
true
true
f704055f4bf5d04061e90afcc1d8780eda8a5540
3,944
py
Python
certbot/compat/os.py
daramousk/certbot
082040afb4c6542445ee8437a3dea61171706a80
[ "Apache-2.0" ]
null
null
null
certbot/compat/os.py
daramousk/certbot
082040afb4c6542445ee8437a3dea61171706a80
[ "Apache-2.0" ]
null
null
null
certbot/compat/os.py
daramousk/certbot
082040afb4c6542445ee8437a3dea61171706a80
[ "Apache-2.0" ]
null
null
null
""" This compat modules is a wrapper of the core os module that forbids usage of specific operations (e.g. chown, chmod, getuid) that would be harmful to the Windows file security model of Certbot. This module is intended to replace standard os module throughout certbot projects (except acme). """ # pylint: disable=function-redefined from __future__ import absolute_import # First round of wrapping: we import statically all public attributes exposed by the os module # This allows in particular to have pylint, mypy, IDEs be aware that most of os members are # available in certbot.compat.os. from os import * # type: ignore # pylint: disable=wildcard-import,unused-wildcard-import,redefined-builtin,os-module-forbidden # Second round of wrapping: we import dynamically all attributes from the os module that have not # yet been imported by the first round (static import). This covers in particular the case of # specific python 3.x versions where not all public attributes are in the special __all__ of os, # and so not in `from os import *`. import os as std_os # pylint: disable=os-module-forbidden import sys as std_sys ourselves = std_sys.modules[__name__] for attribute in dir(std_os): # Check if the attribute does not already exist in our module. It could be internal attributes # of the module (__name__, __doc__), or attributes from standard os already imported with # `from os import *`. if not hasattr(ourselves, attribute): setattr(ourselves, attribute, getattr(std_os, attribute)) # Similar to os.path, allow certbot.compat.os.path to behave as a module std_sys.modules[__name__ + '.path'] = path # Clean all remaining importables that are not from the core os module. del ourselves, std_os, std_sys # Chmod is the root of all evil for our security model on Windows. With the default implementation # of os.chmod on Windows, almost all bits on mode will be ignored, and only a general RO or RW will # be applied. 
The DACL, the inner mechanism to control file access on Windows, will stay on its # default definition, giving effectively at least read permissions to any one, as the default # permissions on root path will be inherit by the file (as NTFS state), and root path can be read # by anyone. So the given mode needs to be translated into a secured and not inherited DACL that # will be applied to this file using filesystem.chmod, calling internally the win32security # module to construct and apply the DACL. Complete security model to translate a POSIX mode into # a suitable DACL on Windows for Certbot can be found here: # https://github.com/certbot/certbot/issues/6356 # Basically, it states that appropriate permissions will be set for the owner, nothing for the # group, appropriate permissions for the "Everyone" group, and all permissions to the # "Administrators" group + "System" user, as they can do everything anyway. def chmod(*unused_args, **unused_kwargs): # pylint: disable=function-redefined """Method os.chmod() is forbidden""" raise RuntimeError('Usage of os.chmod() is forbidden. ' 'Use certbot.compat.filesystem.chmod() instead.') # Because of the blocking strategy on file handlers on Windows, rename does not behave as expected # with POSIX systems: an exception will be raised if dst already exists. def rename(*unused_args, **unused_kwargs): """Method os.rename() is forbidden""" raise RuntimeError('Usage of os.rename() is forbidden. ' 'Use certbot.compat.filesystem.replace() instead.') # Behavior of os.replace is consistent between Windows and Linux. However, it is not supported on # Python 2.x. So, as for os.rename, we forbid it in favor of filesystem.replace. def replace(*unused_args, **unused_kwargs): """Method os.replace() is forbidden""" raise RuntimeError('Usage of os.replace() is forbidden. ' 'Use certbot.compat.filesystem.replace() instead.')
58
128
0.751775
from __future__ import absolute_import from os import * import os as std_os import sys as std_sys ourselves = std_sys.modules[__name__] for attribute in dir(std_os): if not hasattr(ourselves, attribute): setattr(ourselves, attribute, getattr(std_os, attribute)) std_sys.modules[__name__ + '.path'] = path del ourselves, std_os, std_sys def chmod(*unused_args, **unused_kwargs): raise RuntimeError('Usage of os.chmod() is forbidden. ' 'Use certbot.compat.filesystem.chmod() instead.') def rename(*unused_args, **unused_kwargs): raise RuntimeError('Usage of os.rename() is forbidden. ' 'Use certbot.compat.filesystem.replace() instead.') def replace(*unused_args, **unused_kwargs): raise RuntimeError('Usage of os.replace() is forbidden. ' 'Use certbot.compat.filesystem.replace() instead.')
true
true
f704059b08f3fa4d248919124abf5b44bc1ec892
1,331
py
Python
pgp_stuff.py
d7d4af8/2047
bd6781b9502c6fdbd4745be5084977f679fa3fc5
[ "MIT" ]
35
2020-09-01T00:34:50.000Z
2022-03-29T13:14:15.000Z
pgp_stuff.py
d7d4af8/2047
bd6781b9502c6fdbd4745be5084977f679fa3fc5
[ "MIT" ]
3
2020-08-19T20:47:19.000Z
2021-09-06T23:55:49.000Z
pgp_stuff.py
d7d4af8/2047
bd6781b9502c6fdbd4745be5084977f679fa3fc5
[ "MIT" ]
10
2020-08-07T02:20:09.000Z
2022-01-30T06:43:45.000Z
from commons import * import os def pgp_check(): init_directory('./temp') # gpg must exist on your system status = os.system('gpg --version') if status==0: print_up('gpg is found') else: print_err('can\'t find gpg') def verify_publickey_message(pk, msg): # obtain a temp filename fn = get_random_hex_string(10) # save the public key file and the message file pkfn = f'./temp/{fn}.pk' pkbinfn = pkfn+'.gpg' msgfn = f'./temp/{fn}.msg' writefile(pkfn, pk, mode='w', encoding='utf-8') writefile(msgfn, msg, mode='w', encoding='utf-8') def cleanup(): removefile(pkfn) removefile(msgfn) removefile(pkbinfn) # remove armor status = os.system(f'gpg --dearmor {pkfn}') if status != 0: qprint('status:', status) cleanup() raise Exception('failed to dearmor the public key (there might be something wrong with your public key)') # verify status = os.system(f'gpg --no-default-keyring --keyring {pkbinfn} --verify {msgfn}') if status != 0: qprint('status:', status) cleanup() raise Exception('failed to verify the message (your public key is okay but the signature you supplied does not match the public key, or is of a wrong format)') cleanup() return True
28.319149
167
0.623591
from commons import * import os def pgp_check(): init_directory('./temp') status = os.system('gpg --version') if status==0: print_up('gpg is found') else: print_err('can\'t find gpg') def verify_publickey_message(pk, msg): # obtain a temp filename fn = get_random_hex_string(10) # save the public key file and the message file pkfn = f'./temp/{fn}.pk' pkbinfn = pkfn+'.gpg' msgfn = f'./temp/{fn}.msg' writefile(pkfn, pk, mode='w', encoding='utf-8') writefile(msgfn, msg, mode='w', encoding='utf-8') def cleanup(): removefile(pkfn) removefile(msgfn) removefile(pkbinfn) # remove armor status = os.system(f'gpg --dearmor {pkfn}') if status != 0: qprint('status:', status) cleanup() raise Exception('failed to dearmor the public key (there might be something wrong with your public key)') # verify status = os.system(f'gpg --no-default-keyring --keyring {pkbinfn} --verify {msgfn}') if status != 0: qprint('status:', status) cleanup() raise Exception('failed to verify the message (your public key is okay but the signature you supplied does not match the public key, or is of a wrong format)') cleanup() return True
true
true
f704068d246cb619970447be75314858f1109080
7,182
py
Python
venv/Tests/TupleTests.py
matthijsvanvliet/raytracing-python
73d692b47330ab94eedde579a51063e3a907e92b
[ "MIT" ]
1
2021-06-03T11:34:15.000Z
2021-06-03T11:34:15.000Z
venv/Tests/TupleTests.py
matthijsvanvliet/raytracing-python
73d692b47330ab94eedde579a51063e3a907e92b
[ "MIT" ]
null
null
null
venv/Tests/TupleTests.py
matthijsvanvliet/raytracing-python
73d692b47330ab94eedde579a51063e3a907e92b
[ "MIT" ]
null
null
null
import unittest import math from Include.Tuple import * # # Tuple Unit tests # class TestTuplePointVector(unittest.TestCase): def test_Tuple_ifWArgumentIsOneTupleIsPoint(self): self.a = Tuple(4.3, -4.2, 3.1, 1.0) self.assertEqual(self.a.x, 4.3) self.assertEqual(self.a.y, -4.2) self.assertEqual(self.a.z, 3.1) self.assertEqual(self.a.w, 1.0) self.assertEqual(self.a.get_type(), TupleTypes.POINT) self.assertNotEqual(self.a.get_type(), TupleTypes.VECTOR) def test_Tuple_ifWArgumentIsZeroTupleIsVector(self): self.a = Tuple(4.3, -4.2, 3.1, 0.0) self.assertEqual(self.a.x, 4.3) self.assertEqual(self.a.y, -4.2) self.assertEqual(self.a.z, 3.1) self.assertEqual(self.a.w, 0.0) self.assertEqual(self.a.get_type(), TupleTypes.VECTOR) self.assertNotEqual(self.a.get_type(), TupleTypes.POINT) class TestTupleArithmetic(unittest.TestCase): def test_Tuple_addTwoTuples(self): self.a1 = Tuple(3, -2, 5, 1) self.a2 = Tuple(-2, 3, 1, 0) self.result = self.a1 + self.a2 self.assertEqual(self.result, Tuple(1, 1, 6, 1)) def test_Tuple_subtractTwoPoints(self): self.p1 = point(3, 2, 1) self.p2 = point(5, 6, 7) self.result = self.p1 - self.p2 self.assertEqual(self.result, vector(-2, -4, -6)) def test_Tuple_subtractAVectorFromAPoint(self): self.p = point(3, 2, 1) self.v = vector(5, 6, 7) self.result = self.p - self.v self.assertEqual(self.result, point(-2, -4, -6)) def test_Tuple_subtractTwoVectors(self): self.v1 = vector(3, 2, 1) self.v2 = vector(5, 6, 7) self.result = self.v1 - self.v2 self.assertEqual(self.result, vector(-2, -4, -6)) def test_Tuple_subtractVectorFromZeroVector(self): self.zero = vector(0, 0, 0) self.v = vector(1, -2, 3) self.result = self.zero - self.v self.assertEqual(self.result, vector(-1, 2, -3)) def test_Tuple_negateATuple(self): self.a = Tuple(1, -2, 3, -4) self.result = -self.a self.assertEqual(self.result, Tuple(-1, 2, -3, 4)) def test_Tuple_multiplyATupleByAScalar(self): self.a = Tuple(1, -2, 3, -4) self.result = self.a * 3.5 self.assertEqual(self.result, Tuple(3.5, -7, 
10.5, -14)) def test_Tuple_multiplyATupleByAFraction(self): self.a = Tuple(1, -2, 3, -4) self.result = self.a * 0.5 self.assertEqual(self.result, Tuple(0.5, -1, 1.5, -2)) def test_Tuple_divideATupleByAScalar(self): self.a = Tuple(1, -2, 3, -4) self.result = self.a / 2 self.assertEqual(self.result, Tuple(0.5, -1, 1.5, -2)) class TestTupleMagnitude(unittest.TestCase): def test_Tuple_computeTheMagnitudeWithVectorXComponentOne(self): self.v = vector(1, 0, 0) self.result = self.v.magnitude() self.assertEqual(self.result, 1) def test_Tuple_computeTheMagnitudeWithVectorYComponentOne(self): self.v = vector(0, 1, 0) self.result = self.v.magnitude() self.assertEqual(self.result, 1) def test_Tuple_computeTheMagnitudeWithVectorZComponentOne(self): self.v = vector(0, 0, 1) self.result = self.v.magnitude() self.assertEqual(self.result, 1) def test_Tuple_computeTheMagnitudeWithVectorOneTwoThree(self): self.v = vector(1, 2, 3) self.result = self.v.magnitude() self.assertEqual(self.result, math.sqrt(14)) def test_Tuple_computeTheMagnitudeWithVectorMinusOneTwoThree(self): self.v = vector(-1, -2, -3) self.result = self.v.magnitude() self.assertEqual(self.result, math.sqrt(14)) class TestTupleNormalize(unittest.TestCase): def test_Tuple_normalizeVectorWithXAsFour(self): self.v = vector(4, 0, 0) self.result = self.v.normalize() self.assertEqual(self.result, vector(1, 0, 0)) def test_Tuple_normalizeVectorMinusOneTwoThree(self): self.v = vector(1, 2, 3) self.magnitude = math.sqrt(14) self.result = self.v.normalize() self.assertEqual(self.result, vector(1/self.magnitude, 2/self.magnitude, 3/self.magnitude)) def test_Tuple_computeMagnitudeOfNormalizedVector(self): self.v = vector(1, 2, 3) self.norm = self.v.normalize() self.result = self.norm.magnitude() self.assertEqual(self.result, 1) class TestTupleDotProduct(unittest.TestCase): def test_Tuple_theDotProductOfTwoTuples(self): self.a = vector(1, 2, 3) self.b = vector(2, 3, 4) self.result = self.a.dot(self.b) self.assertEqual(self.result, 
20) class TestTupleCrossProduct(unittest.TestCase): def test_Tuple_theCrossProductOfTwoVectors(self): self.a = vector(1, 2, 3) self.b = vector(2, 3, 4) self.result1 = self.a.cross(self.b) self.result2 = self.b.cross(self.a) self.assertEqual(self.result1, vector(-1, 2, -1)) self.assertEqual(self.result2, vector(1, -2, 1)) # # Color Struct Unit test # class TestTupleColor(unittest.TestCase): def test_Color_createsAColor(self): self.c = Color(-0.5, 0.4, 1.7) self.assertEqual(self.c.red, -0.5) self.assertEqual(self.c.green, 0.4) self.assertEqual(self.c.blue, 1.7) def test_Color_AddColors(self): self.c1 = Color(0.9, 0.6, 0.75) self.c2 = Color(0.7, 0.1, 0.25) self.result = self.c1 + self.c2 self.assertEqual(self.result, Color(1.6, 0.7, 1.0)) def test_Color_SubtractColors(self): self.c1 = Color(0.9, 0.6, 0.75) self.c2 = Color(0.6, 0.1, 0.25) self.result = self.c1 - self.c2 self.assertEqual(self.result, Color(0.3, 0.5, 0.5)) def test_Color_MultiplyColorWithScalar(self): self.c = Color(0.2, 0.3, 0.4) self.result = self.c * 2 self.assertEqual(self.result, Color(0.4, 0.6, 0.8)) def test_Color_MultiplyingColors(self): self.c1 = Color(1, 0.2, 0.4) self.c2 = Color(0.9, 1, 0.1) self.result = self.c1 * self.c2 self.assertEqual(self.result, Color(0.9, 0.2, 0.04)) def test_Tuple_reflectingAVectorApproachingAt45Degrees(self): self.v = vector(1, -1, 0) self.n = vector(0, 1, 0) self.r = self.v.reflect(self.n) self.assertEqual(self.r, vector(1, 1, 0)) def test_Tuple_reflectingAVectorOffASlantedSurface(self): self.v = vector(0, -1, 0) self.n = vector(math.sqrt(2)/2, math.sqrt(2)/2, 0) self.r = self.v.reflect(self.n) self.assertEqual(self.r, vector(1, 0, 0)) # # Point function Unit tests # class TestTuplePoint(unittest.TestCase): def test_point_functionCreatesATupleAsAPoint(self): self.point = point(4, -4, 3) self.assertEqual(self.point, Tuple(4, -4, 3, 1)) # # Vector function Unit tests # class TestTupleVector(unittest.TestCase): def test_vector_functionCreatesATupleAsAVector(self): 
self.vector = vector(4, -4, 3) self.assertEqual(self.vector, Tuple(4, -4, 3, 0)) if __name__ == '__main__': unittest.main()
34.528846
99
0.628376
import unittest import math from Include.Tuple import * class TestTuplePointVector(unittest.TestCase): def test_Tuple_ifWArgumentIsOneTupleIsPoint(self): self.a = Tuple(4.3, -4.2, 3.1, 1.0) self.assertEqual(self.a.x, 4.3) self.assertEqual(self.a.y, -4.2) self.assertEqual(self.a.z, 3.1) self.assertEqual(self.a.w, 1.0) self.assertEqual(self.a.get_type(), TupleTypes.POINT) self.assertNotEqual(self.a.get_type(), TupleTypes.VECTOR) def test_Tuple_ifWArgumentIsZeroTupleIsVector(self): self.a = Tuple(4.3, -4.2, 3.1, 0.0) self.assertEqual(self.a.x, 4.3) self.assertEqual(self.a.y, -4.2) self.assertEqual(self.a.z, 3.1) self.assertEqual(self.a.w, 0.0) self.assertEqual(self.a.get_type(), TupleTypes.VECTOR) self.assertNotEqual(self.a.get_type(), TupleTypes.POINT) class TestTupleArithmetic(unittest.TestCase): def test_Tuple_addTwoTuples(self): self.a1 = Tuple(3, -2, 5, 1) self.a2 = Tuple(-2, 3, 1, 0) self.result = self.a1 + self.a2 self.assertEqual(self.result, Tuple(1, 1, 6, 1)) def test_Tuple_subtractTwoPoints(self): self.p1 = point(3, 2, 1) self.p2 = point(5, 6, 7) self.result = self.p1 - self.p2 self.assertEqual(self.result, vector(-2, -4, -6)) def test_Tuple_subtractAVectorFromAPoint(self): self.p = point(3, 2, 1) self.v = vector(5, 6, 7) self.result = self.p - self.v self.assertEqual(self.result, point(-2, -4, -6)) def test_Tuple_subtractTwoVectors(self): self.v1 = vector(3, 2, 1) self.v2 = vector(5, 6, 7) self.result = self.v1 - self.v2 self.assertEqual(self.result, vector(-2, -4, -6)) def test_Tuple_subtractVectorFromZeroVector(self): self.zero = vector(0, 0, 0) self.v = vector(1, -2, 3) self.result = self.zero - self.v self.assertEqual(self.result, vector(-1, 2, -3)) def test_Tuple_negateATuple(self): self.a = Tuple(1, -2, 3, -4) self.result = -self.a self.assertEqual(self.result, Tuple(-1, 2, -3, 4)) def test_Tuple_multiplyATupleByAScalar(self): self.a = Tuple(1, -2, 3, -4) self.result = self.a * 3.5 self.assertEqual(self.result, Tuple(3.5, -7, 10.5, -14)) def 
test_Tuple_multiplyATupleByAFraction(self): self.a = Tuple(1, -2, 3, -4) self.result = self.a * 0.5 self.assertEqual(self.result, Tuple(0.5, -1, 1.5, -2)) def test_Tuple_divideATupleByAScalar(self): self.a = Tuple(1, -2, 3, -4) self.result = self.a / 2 self.assertEqual(self.result, Tuple(0.5, -1, 1.5, -2)) class TestTupleMagnitude(unittest.TestCase): def test_Tuple_computeTheMagnitudeWithVectorXComponentOne(self): self.v = vector(1, 0, 0) self.result = self.v.magnitude() self.assertEqual(self.result, 1) def test_Tuple_computeTheMagnitudeWithVectorYComponentOne(self): self.v = vector(0, 1, 0) self.result = self.v.magnitude() self.assertEqual(self.result, 1) def test_Tuple_computeTheMagnitudeWithVectorZComponentOne(self): self.v = vector(0, 0, 1) self.result = self.v.magnitude() self.assertEqual(self.result, 1) def test_Tuple_computeTheMagnitudeWithVectorOneTwoThree(self): self.v = vector(1, 2, 3) self.result = self.v.magnitude() self.assertEqual(self.result, math.sqrt(14)) def test_Tuple_computeTheMagnitudeWithVectorMinusOneTwoThree(self): self.v = vector(-1, -2, -3) self.result = self.v.magnitude() self.assertEqual(self.result, math.sqrt(14)) class TestTupleNormalize(unittest.TestCase): def test_Tuple_normalizeVectorWithXAsFour(self): self.v = vector(4, 0, 0) self.result = self.v.normalize() self.assertEqual(self.result, vector(1, 0, 0)) def test_Tuple_normalizeVectorMinusOneTwoThree(self): self.v = vector(1, 2, 3) self.magnitude = math.sqrt(14) self.result = self.v.normalize() self.assertEqual(self.result, vector(1/self.magnitude, 2/self.magnitude, 3/self.magnitude)) def test_Tuple_computeMagnitudeOfNormalizedVector(self): self.v = vector(1, 2, 3) self.norm = self.v.normalize() self.result = self.norm.magnitude() self.assertEqual(self.result, 1) class TestTupleDotProduct(unittest.TestCase): def test_Tuple_theDotProductOfTwoTuples(self): self.a = vector(1, 2, 3) self.b = vector(2, 3, 4) self.result = self.a.dot(self.b) self.assertEqual(self.result, 20) class 
TestTupleCrossProduct(unittest.TestCase): def test_Tuple_theCrossProductOfTwoVectors(self): self.a = vector(1, 2, 3) self.b = vector(2, 3, 4) self.result1 = self.a.cross(self.b) self.result2 = self.b.cross(self.a) self.assertEqual(self.result1, vector(-1, 2, -1)) self.assertEqual(self.result2, vector(1, -2, 1)) class TestTupleColor(unittest.TestCase): def test_Color_createsAColor(self): self.c = Color(-0.5, 0.4, 1.7) self.assertEqual(self.c.red, -0.5) self.assertEqual(self.c.green, 0.4) self.assertEqual(self.c.blue, 1.7) def test_Color_AddColors(self): self.c1 = Color(0.9, 0.6, 0.75) self.c2 = Color(0.7, 0.1, 0.25) self.result = self.c1 + self.c2 self.assertEqual(self.result, Color(1.6, 0.7, 1.0)) def test_Color_SubtractColors(self): self.c1 = Color(0.9, 0.6, 0.75) self.c2 = Color(0.6, 0.1, 0.25) self.result = self.c1 - self.c2 self.assertEqual(self.result, Color(0.3, 0.5, 0.5)) def test_Color_MultiplyColorWithScalar(self): self.c = Color(0.2, 0.3, 0.4) self.result = self.c * 2 self.assertEqual(self.result, Color(0.4, 0.6, 0.8)) def test_Color_MultiplyingColors(self): self.c1 = Color(1, 0.2, 0.4) self.c2 = Color(0.9, 1, 0.1) self.result = self.c1 * self.c2 self.assertEqual(self.result, Color(0.9, 0.2, 0.04)) def test_Tuple_reflectingAVectorApproachingAt45Degrees(self): self.v = vector(1, -1, 0) self.n = vector(0, 1, 0) self.r = self.v.reflect(self.n) self.assertEqual(self.r, vector(1, 1, 0)) def test_Tuple_reflectingAVectorOffASlantedSurface(self): self.v = vector(0, -1, 0) self.n = vector(math.sqrt(2)/2, math.sqrt(2)/2, 0) self.r = self.v.reflect(self.n) self.assertEqual(self.r, vector(1, 0, 0)) class TestTuplePoint(unittest.TestCase): def test_point_functionCreatesATupleAsAPoint(self): self.point = point(4, -4, 3) self.assertEqual(self.point, Tuple(4, -4, 3, 1)) class TestTupleVector(unittest.TestCase): def test_vector_functionCreatesATupleAsAVector(self): self.vector = vector(4, -4, 3) self.assertEqual(self.vector, Tuple(4, -4, 3, 0)) if __name__ == '__main__': 
unittest.main()
true
true
f704069f25b67e230b29ca48e4aa09f87f29dab3
4,481
py
Python
matchms/Metadata.py
maximskorik/matchms
922f5afaef123a793194bdd74391027477cbb844
[ "Apache-2.0" ]
null
null
null
matchms/Metadata.py
maximskorik/matchms
922f5afaef123a793194bdd74391027477cbb844
[ "Apache-2.0" ]
null
null
null
matchms/Metadata.py
maximskorik/matchms
922f5afaef123a793194bdd74391027477cbb844
[ "Apache-2.0" ]
null
null
null
from collections.abc import Mapping import numpy as np from pickydict import PickyDict from .utils import load_known_key_conversions _key_regex_replacements = {r"\s": "_", r"[!?.,;:]": ""} _key_replacements = load_known_key_conversions() class Metadata: """Class to handle spectrum metadata in matchms. Metadata entries will be stored as PickyDict dictionary in `metadata.data`. Unlike normal Python dictionaries, not all key names will be accepted. Key names will be forced to be lower-case to avoid confusions between key such as "Precursor_MZ" and "precursor_mz". To avoid the default harmonization of the metadata dictionary use the option `matchms_key_style=False`. Code example: .. code-block:: python metadata = Metadata({"Precursor_MZ": 201.5, "Compound Name": "SuperStuff"}) print(metadata["precursor_mz"]) # => 201.5 print(metadata["compound_name"]) # => SuperStuff Or if the matchms default metadata harmonization should not take place: .. code-block:: python metadata = Metadata({"Precursor_MZ": 201.5, "Compound Name": "SuperStuff"}, matchms_key_style=False) print(metadata["precursor_mz"]) # => 201.5 print(metadata["compound_name"]) # => None (now you need to use "compound name") """ def __init__(self, metadata: dict = None, matchms_key_style: bool = True): """ Parameters ---------- metadata: Spectrum metadata as a dictionary. matchms_key_style: Set to False if metadata harmonization to default keys is not desired. The default is True. 
""" if metadata is None: self._data = PickyDict({}) elif isinstance(metadata, Mapping): self._data = PickyDict(metadata) else: raise ValueError("Unexpected data type for metadata (should be dictionary, or None).") self.matchms_key_style = matchms_key_style if self.matchms_key_style is True: self.harmonize_metadata() def __eq__(self, other_metadata): if self.keys() != other_metadata.keys(): return False for key, value in self.items(): if isinstance(value, np.ndarray): if not np.all(value == other_metadata.get(key)): return False elif value != other_metadata.get(key): return False return True def harmonize_metadata(self): """Runs default harmonization of metadata. Method harmonized metadata field names which includes setting them to lower-case and runing a series of regex replacements followed by default field name replacements (such as precursor_mass --> precursor_mz). """ self._data.key_regex_replacements = _key_regex_replacements self._data.key_replacements = _key_replacements # ------------------------------ # Getters and Setters # ------------------------------ def get(self, key: str, default=None): """Retrieve value from :attr:`metadata` dict. """ return self._data.copy().get(key, default) def set(self, key: str, value): """Set value in :attr:`metadata` dict. """ self._data[key] = value if self.matchms_key_style is True: self.harmonize_metadata() return self def keys(self): """Retrieve all keys of :attr:`.metadata` dict. """ return self._data.keys() def values(self): """Retrieve all values of :attr:`.metadata` dict. """ return self._data.values() def items(self): """Retrieve all items (key, value pairs) of :attr:`.metadata` dict. 
""" return self._data.items() def __getitem__(self, key=None): return self.get(key) def __setitem__(self, key, newvalue): self.set(key, newvalue) @property def data(self): return self._data.copy() @data.setter def data(self, new_dict): if isinstance(new_dict, PickyDict): self._data = new_dict elif isinstance(new_dict, Mapping): self._data = PickyDict(new_dict) if self.matchms_key_style is True: self.harmonize_metadata() else: raise TypeError("Expected input of type dict or PickyDict.")
32.007143
98
0.60366
from collections.abc import Mapping import numpy as np from pickydict import PickyDict from .utils import load_known_key_conversions _key_regex_replacements = {r"\s": "_", r"[!?.,;:]": ""} _key_replacements = load_known_key_conversions() class Metadata: def __init__(self, metadata: dict = None, matchms_key_style: bool = True): if metadata is None: self._data = PickyDict({}) elif isinstance(metadata, Mapping): self._data = PickyDict(metadata) else: raise ValueError("Unexpected data type for metadata (should be dictionary, or None).") self.matchms_key_style = matchms_key_style if self.matchms_key_style is True: self.harmonize_metadata() def __eq__(self, other_metadata): if self.keys() != other_metadata.keys(): return False for key, value in self.items(): if isinstance(value, np.ndarray): if not np.all(value == other_metadata.get(key)): return False elif value != other_metadata.get(key): return False return True def harmonize_metadata(self): self._data.key_regex_replacements = _key_regex_replacements self._data.key_replacements = _key_replacements def get(self, key: str, default=None): return self._data.copy().get(key, default) def set(self, key: str, value): self._data[key] = value if self.matchms_key_style is True: self.harmonize_metadata() return self def keys(self): return self._data.keys() def values(self): return self._data.values() def items(self): return self._data.items() def __getitem__(self, key=None): return self.get(key) def __setitem__(self, key, newvalue): self.set(key, newvalue) @property def data(self): return self._data.copy() @data.setter def data(self, new_dict): if isinstance(new_dict, PickyDict): self._data = new_dict elif isinstance(new_dict, Mapping): self._data = PickyDict(new_dict) if self.matchms_key_style is True: self.harmonize_metadata() else: raise TypeError("Expected input of type dict or PickyDict.")
true
true
f70406af9f6a92249029dee780ff1c94a4916a76
10,990
py
Python
config/settings/base.py
kmvicky/webscrape
92ca9100e21de276ed8470621e1e3a7a6495d54d
[ "MIT" ]
null
null
null
config/settings/base.py
kmvicky/webscrape
92ca9100e21de276ed8470621e1e3a7a6495d54d
[ "MIT" ]
null
null
null
config/settings/base.py
kmvicky/webscrape
92ca9100e21de276ed8470621e1e3a7a6495d54d
[ "MIT" ]
null
null
null
""" Base settings to build other settings files upon. """ import environ ROOT_DIR = ( environ.Path(__file__) - 3 ) # (webscrape/config/settings/base.py - 3 = webscrape/) APPS_DIR = ROOT_DIR.path("webscrape") env = environ.Env() READ_DOT_ENV_FILE = env.bool("DJANGO_READ_DOT_ENV_FILE", default=False) if READ_DOT_ENV_FILE: # OS environment variables take precedence over variables from .env env.read_env(str(ROOT_DIR.path(".env"))) # GENERAL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = env.bool("DJANGO_DEBUG", False) # Local time zone. Choices are # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # though not all of them may be available with every OS. # In Windows, this must be set to your system time zone. TIME_ZONE = "UTC" # https://docs.djangoproject.com/en/dev/ref/settings/#language-code LANGUAGE_CODE = "en-us" # https://docs.djangoproject.com/en/dev/ref/settings/#site-id SITE_ID = 1 # https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n USE_I18N = True # https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n USE_L10N = True # https://docs.djangoproject.com/en/dev/ref/settings/#use-tz USE_TZ = True # https://docs.djangoproject.com/en/dev/ref/settings/#locale-paths LOCALE_PATHS = [ROOT_DIR.path("locale")] # DATABASES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#databases # DATABASES = { # "default": env.db("DATABASE_URL", default="postgres:///webscrape") # } # DATABASES["default"]["ATOMIC_REQUESTS"] = True DATABASES = { 'default': { 'NAME': 'messaging', 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'USER': 'messaging', 'PASSWORD': 'messaging', 'HOST': 'localhost', 'PORT': 5432, 'ATOMIC_REQUESTS': True } } # URLS # ------------------------------------------------------------------------------ # 
https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf ROOT_URLCONF = "config.urls" # https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application WSGI_APPLICATION = "config.wsgi.application" # APPS # ------------------------------------------------------------------------------ DJANGO_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.sites", "django.contrib.messages", "django.contrib.staticfiles", # "django.contrib.humanize", # Handy template tags "django.contrib.admin", ] THIRD_PARTY_APPS = [ "rest_framework", ] LOCAL_APPS = [ "webscrape.application.apps.ApplicationConfig", # Your stuff: custom apps go here ] # https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS # MIGRATIONS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules MIGRATION_MODULES = {"sites": "webscrape.contrib.sites.migrations"} # AUTHENTICATION # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends AUTHENTICATION_BACKENDS = [ "django.contrib.auth.backends.ModelBackend", "allauth.account.auth_backends.AuthenticationBackend", ] # https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model # AUTH_USER_MODEL = "users.User" # https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url # LOGIN_REDIRECT_URL = "users:redirect" # https://docs.djangoproject.com/en/dev/ref/settings/#login-url # LOGIN_URL = "account_login" # PASSWORDS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers PASSWORD_HASHERS = [ # https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django "django.contrib.auth.hashers.Argon2PasswordHasher", 
"django.contrib.auth.hashers.PBKDF2PasswordHasher", "django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher", "django.contrib.auth.hashers.BCryptSHA256PasswordHasher", ] # https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" }, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] # MIDDLEWARE # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#middleware MIDDLEWARE = [ "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.locale.LocaleMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", ] # STATIC # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#static-root STATIC_ROOT = str(ROOT_DIR("staticfiles")) # https://docs.djangoproject.com/en/dev/ref/settings/#static-url STATIC_URL = "/static/" # https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS STATICFILES_DIRS = [str(APPS_DIR.path("static"))] # https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders STATICFILES_FINDERS = [ "django.contrib.staticfiles.finders.FileSystemFinder", "django.contrib.staticfiles.finders.AppDirectoriesFinder", ] # MEDIA # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#media-root 
MEDIA_ROOT = str(APPS_DIR("media")) # https://docs.djangoproject.com/en/dev/ref/settings/#media-url MEDIA_URL = "/media/" # TEMPLATES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#templates TEMPLATES = [ { # https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND "BACKEND": "django.template.backends.django.DjangoTemplates", # https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs "DIRS": [str(APPS_DIR.path("templates"))], "OPTIONS": { # https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types "loaders": [ "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ], # https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors "context_processors": [ "django.template.context_processors.debug", "django.template.context_processors.request", "django.contrib.auth.context_processors.auth", "django.template.context_processors.i18n", "django.template.context_processors.media", "django.template.context_processors.static", "django.template.context_processors.tz", "django.contrib.messages.context_processors.messages", "webscrape.utils.context_processors.settings_context", ], }, } ] # http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs CRISPY_TEMPLATE_PACK = "bootstrap4" # FIXTURES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs FIXTURE_DIRS = (str(APPS_DIR.path("fixtures")),) # SECURITY # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-httponly SESSION_COOKIE_HTTPONLY = True # https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-httponly CSRF_COOKIE_HTTPONLY = True # 
https://docs.djangoproject.com/en/dev/ref/settings/#secure-browser-xss-filter SECURE_BROWSER_XSS_FILTER = True # https://docs.djangoproject.com/en/dev/ref/settings/#x-frame-options X_FRAME_OPTIONS = "DENY" # EMAIL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend EMAIL_BACKEND = env( "DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.smtp.EmailBackend" ) # https://docs.djangoproject.com/en/2.2/ref/settings/#email-timeout EMAIL_TIMEOUT = 5 # ADMIN # ------------------------------------------------------------------------------ # Django Admin URL. ADMIN_URL = "admin/" # https://docs.djangoproject.com/en/dev/ref/settings/#admins ADMINS = [("""Sukant Priyadarshi""", "sukant1994@gmail.com")] # https://docs.djangoproject.com/en/dev/ref/settings/#managers MANAGERS = ADMINS # LOGGING # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#logging # See https://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. 
LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "verbose": { "format": "%(levelname)s %(asctime)s %(module)s " "%(process)d %(thread)d %(message)s" } }, "handlers": { "console": { "level": "DEBUG", "class": "logging.StreamHandler", "formatter": "verbose", } }, "root": {"level": "INFO", "handlers": ["console"]}, } # django-allauth # ------------------------------------------------------------------------------ ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", True) # https://django-allauth.readthedocs.io/en/latest/configuration.html ACCOUNT_AUTHENTICATION_METHOD = "username" # https://django-allauth.readthedocs.io/en/latest/configuration.html ACCOUNT_EMAIL_REQUIRED = True # https://django-allauth.readthedocs.io/en/latest/configuration.html ACCOUNT_EMAIL_VERIFICATION = "mandatory" # https://django-allauth.readthedocs.io/en/latest/configuration.html # ACCOUNT_ADAPTER = "webscrape.users.adapters.AccountAdapter" # https://django-allauth.readthedocs.io/en/latest/configuration.html # SOCIALACCOUNT_ADAPTER = "webscrape.users.adapters.SocialAccountAdapter" # Your stuff... # ------------------------------------------------------------------------------
39.818841
93
0.624477
import environ ROOT_DIR = ( environ.Path(__file__) - 3 ) APPS_DIR = ROOT_DIR.path("webscrape") env = environ.Env() READ_DOT_ENV_FILE = env.bool("DJANGO_READ_DOT_ENV_FILE", default=False) if READ_DOT_ENV_FILE: env.read_env(str(ROOT_DIR.path(".env"))) DEBUG = env.bool("DJANGO_DEBUG", False) TIME_ZONE = "UTC" LANGUAGE_CODE = "en-us" SITE_ID = 1 USE_I18N = True USE_L10N = True USE_TZ = True LOCALE_PATHS = [ROOT_DIR.path("locale")] DATABASES = { 'default': { 'NAME': 'messaging', 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'USER': 'messaging', 'PASSWORD': 'messaging', 'HOST': 'localhost', 'PORT': 5432, 'ATOMIC_REQUESTS': True } } ROOT_URLCONF = "config.urls" WSGI_APPLICATION = "config.wsgi.application" DJANGO_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.sites", "django.contrib.messages", "django.contrib.staticfiles", "django.contrib.admin", ] THIRD_PARTY_APPS = [ "rest_framework", ] LOCAL_APPS = [ "webscrape.application.apps.ApplicationConfig", ] INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS MIGRATION_MODULES = {"sites": "webscrape.contrib.sites.migrations"} AUTHENTICATION_BACKENDS = [ "django.contrib.auth.backends.ModelBackend", "allauth.account.auth_backends.AuthenticationBackend", ] PASSWORD_HASHERS = [ "django.contrib.auth.hashers.Argon2PasswordHasher", "django.contrib.auth.hashers.PBKDF2PasswordHasher", "django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher", "django.contrib.auth.hashers.BCryptSHA256PasswordHasher", ] AUTH_PASSWORD_VALIDATORS = [ { "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" }, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] MIDDLEWARE = [ "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", 
"django.middleware.locale.LocaleMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", ] STATIC_ROOT = str(ROOT_DIR("staticfiles")) STATIC_URL = "/static/" STATICFILES_DIRS = [str(APPS_DIR.path("static"))] STATICFILES_FINDERS = [ "django.contrib.staticfiles.finders.FileSystemFinder", "django.contrib.staticfiles.finders.AppDirectoriesFinder", ] MEDIA_ROOT = str(APPS_DIR("media")) MEDIA_URL = "/media/" TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", "DIRS": [str(APPS_DIR.path("templates"))], "OPTIONS": { "loaders": [ "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ], "context_processors": [ "django.template.context_processors.debug", "django.template.context_processors.request", "django.contrib.auth.context_processors.auth", "django.template.context_processors.i18n", "django.template.context_processors.media", "django.template.context_processors.static", "django.template.context_processors.tz", "django.contrib.messages.context_processors.messages", "webscrape.utils.context_processors.settings_context", ], }, } ] CRISPY_TEMPLATE_PACK = "bootstrap4" FIXTURE_DIRS = (str(APPS_DIR.path("fixtures")),) SESSION_COOKIE_HTTPONLY = True CSRF_COOKIE_HTTPONLY = True SECURE_BROWSER_XSS_FILTER = True X_FRAME_OPTIONS = "DENY" EMAIL_BACKEND = env( "DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.smtp.EmailBackend" ) EMAIL_TIMEOUT = 5 ADMIN_URL = "admin/" ADMINS = [("""Sukant Priyadarshi""", "sukant1994@gmail.com")] MANAGERS = ADMINS LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "verbose": { "format": "%(levelname)s %(asctime)s %(module)s " "%(process)d %(thread)d %(message)s" } }, "handlers": { "console": { "level": "DEBUG", "class": 
"logging.StreamHandler", "formatter": "verbose", } }, "root": {"level": "INFO", "handlers": ["console"]}, } ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", True) ACCOUNT_AUTHENTICATION_METHOD = "username" ACCOUNT_EMAIL_REQUIRED = True ACCOUNT_EMAIL_VERIFICATION = "mandatory"
true
true
f70406ba633b169d9b426b484db582d346a358e5
1,250
py
Python
bench/create-large-number-objects.py
scopatz/PyTables
05a74def785688abd802224a5ba44393a701ebc7
[ "BSD-3-Clause" ]
9
2021-09-28T05:20:22.000Z
2022-03-16T11:09:06.000Z
bench/create-large-number-objects.py
scopatz/PyTables
05a74def785688abd802224a5ba44393a701ebc7
[ "BSD-3-Clause" ]
null
null
null
bench/create-large-number-objects.py
scopatz/PyTables
05a74def785688abd802224a5ba44393a701ebc7
[ "BSD-3-Clause" ]
9
2018-09-14T02:42:36.000Z
2021-07-12T02:37:45.000Z
"This creates an HDF5 file with a potentially large number of objects" import sys import numpy import tables filename = sys.argv[1] # Open a new empty HDF5 file fileh = tables.open_file(filename, mode="w") # nlevels -- Number of levels in hierarchy # ngroups -- Number of groups on each level # ndatasets -- Number of arrays on each group # LR: Low ratio groups/datasets #nlevels, ngroups, ndatasets = (3, 1, 1000) # MR: Medium ratio groups/datasets nlevels, ngroups, ndatasets = (3, 10, 100) #nlevels, ngroups, ndatasets = (3, 5, 10) # HR: High ratio groups/datasets #nlevels, ngroups, ndatasets = (30, 10, 10) # Create an Array to save on disk a = numpy.array([-1, 2, 4], numpy.int16) group = fileh.root group2 = fileh.root for k in range(nlevels): for j in range(ngroups): for i in range(ndatasets): # Save the array on the HDF5 file fileh.create_array(group2, 'array' + str(i), a, "Signed short array") # Create a new group group2 = fileh.create_group(group, 'group' + str(j)) # Create a new group group3 = fileh.create_group(group, 'ngroup' + str(k)) # Iterate over this new group (group3) group = group3 group2 = group3 fileh.close()
29.069767
70
0.6576
import sys import numpy import tables filename = sys.argv[1] fileh = tables.open_file(filename, mode="w") nlevels, ngroups, ndatasets = (3, 10, 100) a = numpy.array([-1, 2, 4], numpy.int16) group = fileh.root group2 = fileh.root for k in range(nlevels): for j in range(ngroups): for i in range(ndatasets): fileh.create_array(group2, 'array' + str(i), a, "Signed short array") group2 = fileh.create_group(group, 'group' + str(j)) group3 = fileh.create_group(group, 'ngroup' + str(k)) group = group3 group2 = group3 fileh.close()
true
true
f70406eb9b9fe364a7fb8937344dde40982c8523
1,391
py
Python
test/test_logging.py
LukasSp/pyPESTO
f4260ff6cacce982bb25fe104e04fb761efdf0ec
[ "BSD-3-Clause" ]
null
null
null
test/test_logging.py
LukasSp/pyPESTO
f4260ff6cacce982bb25fe104e04fb761efdf0ec
[ "BSD-3-Clause" ]
null
null
null
test/test_logging.py
LukasSp/pyPESTO
f4260ff6cacce982bb25fe104e04fb761efdf0ec
[ "BSD-3-Clause" ]
null
null
null
import logging import os import unittest import pypesto import pypesto.logging class LoggingTest(unittest.TestCase): def test_optimize(self): # logging pypesto.logging.log_to_console(logging.WARN) filename = ".test_logging.tmp" pypesto.logging.log_to_file(logging.DEBUG, filename) logger = logging.getLogger('pypesto') if os.path.exists(filename): os.remove(filename) fh = logging.FileHandler(filename) fh.setLevel(logging.DEBUG) logger.addHandler(fh) logger.info("start test") # problem definition def fun(_): raise Exception("This function cannot be called.") objective = pypesto.Objective(fun=fun) problem = pypesto.Problem(objective, -1, 1) optimizer = pypesto.ScipyOptimizer() options = {'allow_failed_starts': True} # optimization pypesto.minimize(problem, optimizer, 5, options=options) # assert logging worked self.assertTrue(os.path.exists(filename)) f = open(filename, 'rb') content = str(f.read()) f.close() # tidy up os.remove(filename) # check if error message got inserted self.assertTrue("fail" in content) if __name__ == '__main__': suite = unittest.TestSuite() suite.addTest(LoggingTest()) unittest.main()
26.245283
64
0.629763
import logging import os import unittest import pypesto import pypesto.logging class LoggingTest(unittest.TestCase): def test_optimize(self): pypesto.logging.log_to_console(logging.WARN) filename = ".test_logging.tmp" pypesto.logging.log_to_file(logging.DEBUG, filename) logger = logging.getLogger('pypesto') if os.path.exists(filename): os.remove(filename) fh = logging.FileHandler(filename) fh.setLevel(logging.DEBUG) logger.addHandler(fh) logger.info("start test") def fun(_): raise Exception("This function cannot be called.") objective = pypesto.Objective(fun=fun) problem = pypesto.Problem(objective, -1, 1) optimizer = pypesto.ScipyOptimizer() options = {'allow_failed_starts': True} pypesto.minimize(problem, optimizer, 5, options=options) self.assertTrue(os.path.exists(filename)) f = open(filename, 'rb') content = str(f.read()) f.close() os.remove(filename) self.assertTrue("fail" in content) if __name__ == '__main__': suite = unittest.TestSuite() suite.addTest(LoggingTest()) unittest.main()
true
true
f7040787cfab454ed3ba0eff92e82d5a1f9ab64d
116
py
Python
Django/tasking-and-analysis-system-django/tasking-and-analysis-system/apps/audit_trail/urls.py
Yeva9/ITC-projects
19e967d656c86c64f04cc1ffbe03540f97c6eb34
[ "MIT" ]
null
null
null
Django/tasking-and-analysis-system-django/tasking-and-analysis-system/apps/audit_trail/urls.py
Yeva9/ITC-projects
19e967d656c86c64f04cc1ffbe03540f97c6eb34
[ "MIT" ]
null
null
null
Django/tasking-and-analysis-system-django/tasking-and-analysis-system/apps/audit_trail/urls.py
Yeva9/ITC-projects
19e967d656c86c64f04cc1ffbe03540f97c6eb34
[ "MIT" ]
null
null
null
from django.urls import path from .views import audit_view urlpatterns = [ path('', audit_view, name="audit") ]
19.333333
38
0.715517
from django.urls import path from .views import audit_view urlpatterns = [ path('', audit_view, name="audit") ]
true
true
f70407ea056eccf4a4115cfa0cb37026b3d9e89b
5,777
py
Python
dashboard/views.py
rossm6/accounts
74633ce4038806222048d85ef9dfe97a957a6a71
[ "MIT" ]
11
2021-01-23T01:09:54.000Z
2021-01-25T07:16:30.000Z
dashboard/views.py
rossm6/accounts
74633ce4038806222048d85ef9dfe97a957a6a71
[ "MIT" ]
7
2021-04-06T18:19:10.000Z
2021-09-22T19:45:03.000Z
dashboard/views.py
rossm6/accounts
74633ce4038806222048d85ef9dfe97a957a6a71
[ "MIT" ]
3
2021-01-23T18:55:32.000Z
2021-02-16T17:47:59.000Z
from cashbook.models import CashBookTransaction from controls.models import ModuleSettings, Period from django.contrib.auth.mixins import LoginRequiredMixin from django.db.models import F, OuterRef, Subquery, Sum from django.db.models.functions import Coalesce from django.views.generic import TemplateView from purchases.models import PurchaseHeader, PurchaseMatching from sales.models import SaleHeader, SaleMatching class TotalOwedReport: def __init__(self, header_model, match_model): self.header_model = header_model self.match_model = match_model def _report(self, matched_by, matched_to, types, period_subquery): return ( self.header_model .objects .filter(type__in=types) .filter(period__fy_and_period__in=Subquery(period_subquery)) .annotate( mbt=Coalesce( Subquery( matched_by.values('matched_by_total') ), 0 ) ) .annotate( mtt=Coalesce( Subquery( matched_to.values('matched_to_total') ), 0 ) ) .annotate( actual_due=F('due') + F('mbt') + F('mtt') ) ) def _report_per_period_for_last_5_periods(self, matched_by, matched_to, types, period): period_subquery = ( Period .objects .filter(fy_and_period__lte=period.fy_and_period) .values('fy_and_period') .order_by("-fy_and_period") [:5] ) q = ( self ._report(matched_by, matched_to, types, period_subquery) .values('period__fy_and_period') .annotate( total_due=Coalesce(Sum('actual_due'), 0) ) ) report = {} for period in period_subquery: report[period["fy_and_period"]] = 0 for period in q: report[period["period__fy_and_period"]] = period["total_due"] return report def _report_for_all_periods_prior(self, matched_by, matched_to, types, period): """ Get the total owed for all periods prior to @period i.e. 
the total for 'Older' """ period_subquery = ( Period .objects .filter(fy_and_period__lte=period.fy_and_period) .values('fy_and_period') .order_by("-fy_and_period") [5:] ) return ( self ._report(matched_by, matched_to, types, period_subquery) .aggregate( total_due=Coalesce(Sum('actual_due'), 0) ) ) def report(self, current_period): """ This is used by the dashboard and not the aged creditors report """ matched_by = ( self.match_model .objects .filter(period__fy_and_period__gt=current_period.fy_and_period) .filter(matched_by=OuterRef('pk')) .values('matched_by') .annotate(matched_by_total=Sum('value') * -1) ) matched_to = ( self.match_model .objects .filter(period__fy_and_period__gt=current_period.fy_and_period) .filter(matched_to=OuterRef('pk')) .values('matched_to') .annotate(matched_to_total=Sum('value')) ) non_payment_types = [ t[0] for t in self.header_model.types if t[0] not in self.header_model.payment_types ] report_from_current_to_4_periods_ago = self._report_per_period_for_last_5_periods( matched_by, matched_to, non_payment_types, current_period) older = self._report_for_all_periods_prior( matched_by, matched_to, non_payment_types, current_period) report = [] labels = ["Current", "1 period ago", "2 periods ago", "3 periods ago", "4 periods ago"] for i, (period, value) in enumerate(report_from_current_to_4_periods_ago.items()): r = { "period": labels[i], "value": value } report.append(r) report.append({ "period": "Older", "value": older["total_due"] }) report.reverse() # In UI we actually want 'Older' to show first from left to right i.e. 
opposite of list return report class DashBoard(LoginRequiredMixin, TemplateView): template_name = "dashboard/dashboard.html" def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) mod_settings = ModuleSettings.objects.first() cash_book_period = mod_settings.cash_book_period cash_book_in_and_out_report = ( CashBookTransaction .objects .cash_book_in_and_out_report(cash_book_period) ) cash_book_in_and_out = [] for period in cash_book_in_and_out_report: p = period["period__fy_and_period"] o = {} o["period"] = p[4:] + " " + p[:4] o["in"] = period["total_monies_in"] o["out"] = period["total_monies_out"] cash_book_in_and_out.append(o) context["cash_in_and_out"] = cash_book_in_and_out owed_to_you = TotalOwedReport( SaleHeader, SaleMatching).report(mod_settings.sales_period) owed_by_you = TotalOwedReport(PurchaseHeader, PurchaseMatching).report( mod_settings.purchases_period) context["owed_to_you"] = owed_to_you context["owed_by_you"] = owed_by_you return context
35.660494
112
0.57677
from cashbook.models import CashBookTransaction from controls.models import ModuleSettings, Period from django.contrib.auth.mixins import LoginRequiredMixin from django.db.models import F, OuterRef, Subquery, Sum from django.db.models.functions import Coalesce from django.views.generic import TemplateView from purchases.models import PurchaseHeader, PurchaseMatching from sales.models import SaleHeader, SaleMatching class TotalOwedReport: def __init__(self, header_model, match_model): self.header_model = header_model self.match_model = match_model def _report(self, matched_by, matched_to, types, period_subquery): return ( self.header_model .objects .filter(type__in=types) .filter(period__fy_and_period__in=Subquery(period_subquery)) .annotate( mbt=Coalesce( Subquery( matched_by.values('matched_by_total') ), 0 ) ) .annotate( mtt=Coalesce( Subquery( matched_to.values('matched_to_total') ), 0 ) ) .annotate( actual_due=F('due') + F('mbt') + F('mtt') ) ) def _report_per_period_for_last_5_periods(self, matched_by, matched_to, types, period): period_subquery = ( Period .objects .filter(fy_and_period__lte=period.fy_and_period) .values('fy_and_period') .order_by("-fy_and_period") [:5] ) q = ( self ._report(matched_by, matched_to, types, period_subquery) .values('period__fy_and_period') .annotate( total_due=Coalesce(Sum('actual_due'), 0) ) ) report = {} for period in period_subquery: report[period["fy_and_period"]] = 0 for period in q: report[period["period__fy_and_period"]] = period["total_due"] return report def _report_for_all_periods_prior(self, matched_by, matched_to, types, period): period_subquery = ( Period .objects .filter(fy_and_period__lte=period.fy_and_period) .values('fy_and_period') .order_by("-fy_and_period") [5:] ) return ( self ._report(matched_by, matched_to, types, period_subquery) .aggregate( total_due=Coalesce(Sum('actual_due'), 0) ) ) def report(self, current_period): matched_by = ( self.match_model .objects 
.filter(period__fy_and_period__gt=current_period.fy_and_period) .filter(matched_by=OuterRef('pk')) .values('matched_by') .annotate(matched_by_total=Sum('value') * -1) ) matched_to = ( self.match_model .objects .filter(period__fy_and_period__gt=current_period.fy_and_period) .filter(matched_to=OuterRef('pk')) .values('matched_to') .annotate(matched_to_total=Sum('value')) ) non_payment_types = [ t[0] for t in self.header_model.types if t[0] not in self.header_model.payment_types ] report_from_current_to_4_periods_ago = self._report_per_period_for_last_5_periods( matched_by, matched_to, non_payment_types, current_period) older = self._report_for_all_periods_prior( matched_by, matched_to, non_payment_types, current_period) report = [] labels = ["Current", "1 period ago", "2 periods ago", "3 periods ago", "4 periods ago"] for i, (period, value) in enumerate(report_from_current_to_4_periods_ago.items()): r = { "period": labels[i], "value": value } report.append(r) report.append({ "period": "Older", "value": older["total_due"] }) report.reverse() return report class DashBoard(LoginRequiredMixin, TemplateView): template_name = "dashboard/dashboard.html" def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) mod_settings = ModuleSettings.objects.first() cash_book_period = mod_settings.cash_book_period cash_book_in_and_out_report = ( CashBookTransaction .objects .cash_book_in_and_out_report(cash_book_period) ) cash_book_in_and_out = [] for period in cash_book_in_and_out_report: p = period["period__fy_and_period"] o = {} o["period"] = p[4:] + " " + p[:4] o["in"] = period["total_monies_in"] o["out"] = period["total_monies_out"] cash_book_in_and_out.append(o) context["cash_in_and_out"] = cash_book_in_and_out owed_to_you = TotalOwedReport( SaleHeader, SaleMatching).report(mod_settings.sales_period) owed_by_you = TotalOwedReport(PurchaseHeader, PurchaseMatching).report( mod_settings.purchases_period) context["owed_to_you"] = owed_to_you 
context["owed_by_you"] = owed_by_you return context
true
true
f70409d59ca7a839a4f7c3eb33d954c30b63472a
9,299
py
Python
Lib/site-packages/deriva/transfer/download/processors/query/base_query_processor.py
fochoao/cpython
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
[ "bzip2-1.0.6", "0BSD" ]
3
2018-11-18T19:33:53.000Z
2019-10-03T18:27:49.000Z
deriva/transfer/download/processors/query/base_query_processor.py
informatics-isi-edu/deriva-py
e7e18e3d65a01a530ed52bb94e8710ae57026e6d
[ "Apache-2.0" ]
81
2017-06-13T18:46:47.000Z
2022-01-13T01:16:33.000Z
Lib/site-packages/deriva/transfer/download/processors/query/base_query_processor.py
fochoao/cpython
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
[ "bzip2-1.0.6", "0BSD" ]
4
2018-06-25T18:23:33.000Z
2021-01-15T19:38:52.000Z
import os import errno import certifi import requests from deriva.core import urlsplit, get_new_requests_session, stob, make_dirs, DEFAULT_SESSION_CONFIG from deriva.transfer.download import DerivaDownloadError, DerivaDownloadConfigurationError, \ DerivaDownloadAuthenticationError, DerivaDownloadAuthorizationError from deriva.transfer.download.processors.base_processor import BaseProcessor, \ LOCAL_PATH_KEY, FILE_SIZE_KEY, SOURCE_URL_KEY from bdbag import bdbag_ro as ro class BaseQueryProcessor(BaseProcessor): """ Base class for QueryProcessor classes """ HEADERS = {'Connection': 'keep-alive'} def __init__(self, envars=None, **kwargs): super(BaseQueryProcessor, self).__init__(envars, **kwargs) self.catalog = kwargs["catalog"] self.store = kwargs["store"] self.base_path = kwargs["base_path"] self.query = self.parameters["query_path"] if self.envars: self.query = self.query.format(**self.envars) self.sub_path = self.parameters.get("output_path") self.output_filename = self.parameters.get("output_filename") self.store_base = kwargs.get("store_base", "/hatrac/") self.is_bag = kwargs.get("bag", False) self.sessions = kwargs.get("sessions", dict()) self.content_type = "application/octet-stream" self.url = ''.join([self.catalog.get_server_uri(), self.query]) self.ro_file_provenance = stob(self.parameters.get("ro_file_provenance", False if not self.is_bag else True)) self.ro_manifest = self.kwargs.get("ro_manifest") self.ro_author_name = self.kwargs.get("ro_author_name") self.ro_author_orcid = self.kwargs.get("ro_author_orcid") self.output_relpath = None self.output_abspath = None self.paged_query = self.parameters.get("paged_query", False) self.paged_query_size = self.parameters.get("paged_query_size", 100000) def process(self): resp = self.catalogQuery(headers={'accept': self.content_type}) if os.path.isfile(self.output_abspath): if self.ro_manifest and self.ro_file_provenance: ro.add_file_metadata(self.ro_manifest, source_url=self.url, local_path=self.output_relpath, 
media_type=self.content_type, retrieved_on=ro.make_retrieved_on(), retrieved_by=ro.make_retrieved_by(self.ro_author_name, orcid=self.ro_author_orcid), bundled_as=ro.make_bundled_as()) self.outputs.update({self.output_relpath: {LOCAL_PATH_KEY: self.output_abspath, FILE_SIZE_KEY: os.path.getsize(self.output_abspath), SOURCE_URL_KEY: self.url}}) return self.outputs def catalogQuery(self, headers=None, as_file=True): if not headers: headers = self.HEADERS.copy() else: headers.update(self.HEADERS) if as_file: output_dir = os.path.dirname(self.output_abspath) make_dirs(output_dir) try: if as_file: return self.catalog.getAsFile(self.query, self.output_abspath, headers=headers, delete_if_empty=True, paged=self.paged_query, page_size=self.paged_query_size) else: return self.catalog.get(self.query, headers=headers).json() except requests.HTTPError as e: if e.response.status_code == 401: raise DerivaDownloadAuthenticationError(e) if e.response.status_code == 403: raise DerivaDownloadAuthorizationError(e) if as_file: os.remove(self.output_abspath) raise DerivaDownloadError("Error executing catalog query: %s" % e) except Exception: if as_file: os.remove(self.output_abspath) raise def headForHeaders(self, url, raise_for_status=False): store = self.getHatracStore(url) if store: r = store.head(url, headers=self.HEADERS) if raise_for_status: r.raise_for_status() headers = r.headers else: url = self.getExternalUrl(url) session = self.getExternalSession(urlsplit(url).hostname) r = session.head(url, headers=self.HEADERS) if raise_for_status: r.raise_for_status() headers = r.headers return headers def getHatracStore(self, url): urlparts = urlsplit(url) if not urlparts.path.startswith(self.store_base): return None if url.startswith(self.store_base): return self.store else: serverURI = urlparts.scheme + "://" + urlparts.netloc if serverURI == self.store.get_server_uri(): return self.store else: # do we need to deal with the possibility of a fully qualified URL referencing a different 
hatrac host? raise DerivaDownloadConfigurationError( "Got a reference to a Hatrac server [%s] that is different from the expected Hatrac server: %s" % ( serverURI, self.store.get_server_uri)) def getExternalUrl(self, url): urlparts = urlsplit(url) if urlparts.path.startswith(self.store_base): path_only = url.startswith(self.store_base) server_uri = urlparts.scheme + "://" + urlparts.netloc if server_uri == self.store.get_server_uri() or path_only: url = ''.join([self.store.get_server_uri(), url]) if path_only else url else: if not (urlparts.scheme and urlparts.netloc): urlparts = urlsplit(self.catalog.get_server_uri()) server_uri = urlparts.scheme + "://" + urlparts.netloc url = ''.join([server_uri, url]) return url def getExternalSession(self, host): sessions = self.sessions auth_params = self.kwargs.get("auth_params", dict()) cookies = auth_params.get("cookies") auth_url = auth_params.get("auth_url") login_params = auth_params.get("login_params") session_config = self.kwargs.get("session_config") session = sessions.get(host) if session is not None: return session if not session_config: session_config = DEFAULT_SESSION_CONFIG session = get_new_requests_session(session_config=session_config) if cookies: session.cookies.update(cookies) if login_params and auth_url: r = session.post(auth_url, data=login_params, verify=certifi.where()) if r.status_code > 203: raise DerivaDownloadError( 'GetExternalSession Failed with Status Code: %s\n%s\n' % (r.status_code, r.text)) sessions[host] = session return session def create_default_paths(self): self.output_relpath, self.output_abspath = self.create_paths(self.base_path, sub_path=self.sub_path, filename=self.output_filename, ext=self.ext, is_bag=self.is_bag, envars=self.envars) def __del__(self): for session in self.sessions.values(): session.close() class CSVQueryProcessor(BaseQueryProcessor): def __init__(self, envars=None, **kwargs): super(CSVQueryProcessor, self).__init__(envars, **kwargs) self.ext = ".csv" 
self.content_type = "text/csv" self.create_default_paths() class JSONQueryProcessor(BaseQueryProcessor): def __init__(self, envars=None, **kwargs): super(JSONQueryProcessor, self).__init__(envars, **kwargs) self.ext = ".json" self.content_type = "application/json" self.create_default_paths() class JSONStreamQueryProcessor(BaseQueryProcessor): def __init__(self, envars=None, **kwargs): super(JSONStreamQueryProcessor, self).__init__(envars, **kwargs) self.ext = ".json" self.content_type = "application/x-json-stream" self.create_default_paths() class JSONEnvUpdateProcessor(BaseQueryProcessor): def __init__(self, envars=None, **kwargs): super(JSONEnvUpdateProcessor, self).__init__(envars, **kwargs) def process(self): resp = self.catalogQuery(headers={'accept': "application/json"}, as_file=False) if resp: self.envars.update(resp[0]) self._urlencode_envars() return {}
43.657277
119
0.586515
import os import errno import certifi import requests from deriva.core import urlsplit, get_new_requests_session, stob, make_dirs, DEFAULT_SESSION_CONFIG from deriva.transfer.download import DerivaDownloadError, DerivaDownloadConfigurationError, \ DerivaDownloadAuthenticationError, DerivaDownloadAuthorizationError from deriva.transfer.download.processors.base_processor import BaseProcessor, \ LOCAL_PATH_KEY, FILE_SIZE_KEY, SOURCE_URL_KEY from bdbag import bdbag_ro as ro class BaseQueryProcessor(BaseProcessor): HEADERS = {'Connection': 'keep-alive'} def __init__(self, envars=None, **kwargs): super(BaseQueryProcessor, self).__init__(envars, **kwargs) self.catalog = kwargs["catalog"] self.store = kwargs["store"] self.base_path = kwargs["base_path"] self.query = self.parameters["query_path"] if self.envars: self.query = self.query.format(**self.envars) self.sub_path = self.parameters.get("output_path") self.output_filename = self.parameters.get("output_filename") self.store_base = kwargs.get("store_base", "/hatrac/") self.is_bag = kwargs.get("bag", False) self.sessions = kwargs.get("sessions", dict()) self.content_type = "application/octet-stream" self.url = ''.join([self.catalog.get_server_uri(), self.query]) self.ro_file_provenance = stob(self.parameters.get("ro_file_provenance", False if not self.is_bag else True)) self.ro_manifest = self.kwargs.get("ro_manifest") self.ro_author_name = self.kwargs.get("ro_author_name") self.ro_author_orcid = self.kwargs.get("ro_author_orcid") self.output_relpath = None self.output_abspath = None self.paged_query = self.parameters.get("paged_query", False) self.paged_query_size = self.parameters.get("paged_query_size", 100000) def process(self): resp = self.catalogQuery(headers={'accept': self.content_type}) if os.path.isfile(self.output_abspath): if self.ro_manifest and self.ro_file_provenance: ro.add_file_metadata(self.ro_manifest, source_url=self.url, local_path=self.output_relpath, media_type=self.content_type, 
retrieved_on=ro.make_retrieved_on(), retrieved_by=ro.make_retrieved_by(self.ro_author_name, orcid=self.ro_author_orcid), bundled_as=ro.make_bundled_as()) self.outputs.update({self.output_relpath: {LOCAL_PATH_KEY: self.output_abspath, FILE_SIZE_KEY: os.path.getsize(self.output_abspath), SOURCE_URL_KEY: self.url}}) return self.outputs def catalogQuery(self, headers=None, as_file=True): if not headers: headers = self.HEADERS.copy() else: headers.update(self.HEADERS) if as_file: output_dir = os.path.dirname(self.output_abspath) make_dirs(output_dir) try: if as_file: return self.catalog.getAsFile(self.query, self.output_abspath, headers=headers, delete_if_empty=True, paged=self.paged_query, page_size=self.paged_query_size) else: return self.catalog.get(self.query, headers=headers).json() except requests.HTTPError as e: if e.response.status_code == 401: raise DerivaDownloadAuthenticationError(e) if e.response.status_code == 403: raise DerivaDownloadAuthorizationError(e) if as_file: os.remove(self.output_abspath) raise DerivaDownloadError("Error executing catalog query: %s" % e) except Exception: if as_file: os.remove(self.output_abspath) raise def headForHeaders(self, url, raise_for_status=False): store = self.getHatracStore(url) if store: r = store.head(url, headers=self.HEADERS) if raise_for_status: r.raise_for_status() headers = r.headers else: url = self.getExternalUrl(url) session = self.getExternalSession(urlsplit(url).hostname) r = session.head(url, headers=self.HEADERS) if raise_for_status: r.raise_for_status() headers = r.headers return headers def getHatracStore(self, url): urlparts = urlsplit(url) if not urlparts.path.startswith(self.store_base): return None if url.startswith(self.store_base): return self.store else: serverURI = urlparts.scheme + "://" + urlparts.netloc if serverURI == self.store.get_server_uri(): return self.store else: raise DerivaDownloadConfigurationError( "Got a reference to a Hatrac server [%s] that is different from the expected Hatrac 
server: %s" % ( serverURI, self.store.get_server_uri)) def getExternalUrl(self, url): urlparts = urlsplit(url) if urlparts.path.startswith(self.store_base): path_only = url.startswith(self.store_base) server_uri = urlparts.scheme + "://" + urlparts.netloc if server_uri == self.store.get_server_uri() or path_only: url = ''.join([self.store.get_server_uri(), url]) if path_only else url else: if not (urlparts.scheme and urlparts.netloc): urlparts = urlsplit(self.catalog.get_server_uri()) server_uri = urlparts.scheme + "://" + urlparts.netloc url = ''.join([server_uri, url]) return url def getExternalSession(self, host): sessions = self.sessions auth_params = self.kwargs.get("auth_params", dict()) cookies = auth_params.get("cookies") auth_url = auth_params.get("auth_url") login_params = auth_params.get("login_params") session_config = self.kwargs.get("session_config") session = sessions.get(host) if session is not None: return session if not session_config: session_config = DEFAULT_SESSION_CONFIG session = get_new_requests_session(session_config=session_config) if cookies: session.cookies.update(cookies) if login_params and auth_url: r = session.post(auth_url, data=login_params, verify=certifi.where()) if r.status_code > 203: raise DerivaDownloadError( 'GetExternalSession Failed with Status Code: %s\n%s\n' % (r.status_code, r.text)) sessions[host] = session return session def create_default_paths(self): self.output_relpath, self.output_abspath = self.create_paths(self.base_path, sub_path=self.sub_path, filename=self.output_filename, ext=self.ext, is_bag=self.is_bag, envars=self.envars) def __del__(self): for session in self.sessions.values(): session.close() class CSVQueryProcessor(BaseQueryProcessor): def __init__(self, envars=None, **kwargs): super(CSVQueryProcessor, self).__init__(envars, **kwargs) self.ext = ".csv" self.content_type = "text/csv" self.create_default_paths() class JSONQueryProcessor(BaseQueryProcessor): def __init__(self, envars=None, **kwargs): 
super(JSONQueryProcessor, self).__init__(envars, **kwargs) self.ext = ".json" self.content_type = "application/json" self.create_default_paths() class JSONStreamQueryProcessor(BaseQueryProcessor): def __init__(self, envars=None, **kwargs): super(JSONStreamQueryProcessor, self).__init__(envars, **kwargs) self.ext = ".json" self.content_type = "application/x-json-stream" self.create_default_paths() class JSONEnvUpdateProcessor(BaseQueryProcessor): def __init__(self, envars=None, **kwargs): super(JSONEnvUpdateProcessor, self).__init__(envars, **kwargs) def process(self): resp = self.catalogQuery(headers={'accept': "application/json"}, as_file=False) if resp: self.envars.update(resp[0]) self._urlencode_envars() return {}
true
true
f7040b9b8b2205c05f39974dc23dd90767f1595b
1,659
py
Python
tests/test_audio.py
Lisafiluz/calendar
a88e34f7ab9dd25753ca041461e56d20a7f9fd1e
[ "Apache-2.0" ]
null
null
null
tests/test_audio.py
Lisafiluz/calendar
a88e34f7ab9dd25753ca041461e56d20a7f9fd1e
[ "Apache-2.0" ]
null
null
null
tests/test_audio.py
Lisafiluz/calendar
a88e34f7ab9dd25753ca041461e56d20a7f9fd1e
[ "Apache-2.0" ]
null
null
null
from app.routers.audio import router AUDIO_SETTINGS_URL = router.url_path_for("audio_settings") GET_CHOICES_URL = router.url_path_for("get_choices") START_AUDIO_URL = router.url_path_for("start_audio") def test_get_settings(audio_test_client): response = audio_test_client.get(url=AUDIO_SETTINGS_URL) assert response.ok assert b"Audio Settings" in response.content def test_start_audio_default(audio_test_client): response = audio_test_client.get(START_AUDIO_URL) assert response.ok def test_choices_Off(audio_test_client): data = {"music_on": False, "sfx_on": False} response = audio_test_client.post(url=GET_CHOICES_URL, data=data) assert response.ok def test_choices_On(audio_test_client): data = { "music_on": True, "music_choices": ["GASTRONOMICA.mp3"], "music_vol": 50, "sfx_on": True, "sfx_choice": "click_1.wav", "sfx_vol": 50, } response = audio_test_client.post(url=GET_CHOICES_URL, data=data) assert response.ok def test_start_audio(audio_test_client): data = { "music_on": True, "music_choices": ["GASTRONOMICA.mp3"], "music_vol": 50, "sfx_on": True, "sfx_choice": "click_1.wav", "sfx_vol": 50, } audio_test_client.post(url=GET_CHOICES_URL, data=data) response = audio_test_client.get(url=START_AUDIO_URL) assert response.ok def test_start_audio_sfx_off(audio_test_client): data = {"music_on_off": "Off", "sfx_on_off": "Off"} audio_test_client.post(url=GET_CHOICES_URL, data=data) response = audio_test_client.get(url=START_AUDIO_URL) assert response.ok
29.105263
69
0.705244
from app.routers.audio import router AUDIO_SETTINGS_URL = router.url_path_for("audio_settings") GET_CHOICES_URL = router.url_path_for("get_choices") START_AUDIO_URL = router.url_path_for("start_audio") def test_get_settings(audio_test_client): response = audio_test_client.get(url=AUDIO_SETTINGS_URL) assert response.ok assert b"Audio Settings" in response.content def test_start_audio_default(audio_test_client): response = audio_test_client.get(START_AUDIO_URL) assert response.ok def test_choices_Off(audio_test_client): data = {"music_on": False, "sfx_on": False} response = audio_test_client.post(url=GET_CHOICES_URL, data=data) assert response.ok def test_choices_On(audio_test_client): data = { "music_on": True, "music_choices": ["GASTRONOMICA.mp3"], "music_vol": 50, "sfx_on": True, "sfx_choice": "click_1.wav", "sfx_vol": 50, } response = audio_test_client.post(url=GET_CHOICES_URL, data=data) assert response.ok def test_start_audio(audio_test_client): data = { "music_on": True, "music_choices": ["GASTRONOMICA.mp3"], "music_vol": 50, "sfx_on": True, "sfx_choice": "click_1.wav", "sfx_vol": 50, } audio_test_client.post(url=GET_CHOICES_URL, data=data) response = audio_test_client.get(url=START_AUDIO_URL) assert response.ok def test_start_audio_sfx_off(audio_test_client): data = {"music_on_off": "Off", "sfx_on_off": "Off"} audio_test_client.post(url=GET_CHOICES_URL, data=data) response = audio_test_client.get(url=START_AUDIO_URL) assert response.ok
true
true
f7040c6cca5a86749407c6d12a090a8e1288ff52
6,990
py
Python
src/teleop_tools/mouse_teleop/scripts/mouse_teleop.py
aljanabim/svea
37d27089237af3777456d7664473ffb811dabf33
[ "MIT" ]
5
2021-06-25T13:09:30.000Z
2022-03-15T11:33:07.000Z
src/teleop_tools/mouse_teleop/scripts/mouse_teleop.py
aljanabim/svea
37d27089237af3777456d7664473ffb811dabf33
[ "MIT" ]
null
null
null
src/teleop_tools/mouse_teleop/scripts/mouse_teleop.py
aljanabim/svea
37d27089237af3777456d7664473ffb811dabf33
[ "MIT" ]
17
2019-09-29T10:22:41.000Z
2021-04-08T12:38:37.000Z
#! /usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2015 Enrique Fernandez # Released under the BSD License. # # Authors: # * Enrique Fernandez import Tkinter import rospy from geometry_msgs.msg import Twist, Vector3 import numpy class MouseTeleop(): def __init__(self): # Retrieve params: self._frequency = rospy.get_param('~frequency', 0.0) self._scale = rospy.get_param('~scale', 1.0) self._holonomic = rospy.get_param('~holonomic', False) # Create twist publisher: self._pub_cmd = rospy.Publisher('mouse_vel', Twist, queue_size=100) # Initialize twist components to zero: self._v_x = 0.0 self._v_y = 0.0 self._w = 0.0 # Initialize mouse position (x, y) to None (unknown); it's initialized # when the mouse button is pressed on the _start callback that handles # that event: self._x = None self._y = None # Create window: self._root = Tkinter.Tk() self._root.title('Mouse Teleop') # Make window non-resizable: self._root.resizable(0, 0) # Create canvas: self._canvas = Tkinter.Canvas(self._root, bg='white') # Create canvas objects: self._canvas.create_arc(0, 0, 0, 0, fill='red', outline='red', width=1, style=Tkinter.PIESLICE, start=90.0, tag='w') self._canvas.create_line(0, 0, 0, 0, fill='blue', width=4, tag='v_x') if self._holonomic: self._canvas.create_line(0, 0, 0, 0, fill='blue', width=4, tag='v_y') # Create canvas text objects: self._text_v_x = Tkinter.StringVar() if self._holonomic: self._text_v_y = Tkinter.StringVar() self._text_w = Tkinter.StringVar() self._label_v_x = Tkinter.Label(self._root, anchor=Tkinter.W, textvariable=self._text_v_x) if self._holonomic: self._label_v_y = Tkinter.Label(self._root, anchor=Tkinter.W, textvariable=self._text_v_y) self._label_w = Tkinter.Label(self._root, anchor=Tkinter.W, textvariable=self._text_w) if self._holonomic: self._text_v_x.set('v_x = %0.2f m/s' % self._v_x) self._text_v_y.set('v_y = %0.2f m/s' % self._v_y) self._text_w.set( 'w = %0.2f deg/s' % self._w) else: self._text_v_x.set('v = %0.2f m/s' % self._v_x) 
self._text_w.set( 'w = %0.2f deg/s' % self._w) self._label_v_x.pack() if self._holonomic: self._label_v_y.pack() self._label_w.pack() # Bind event handlers: self._canvas.bind('<Button-1>', self._start) self._canvas.bind('<ButtonRelease-1>', self._release) self._canvas.bind('<Configure>', self._configure) if self._holonomic: self._canvas.bind('<B1-Motion>', self._mouse_motion_linear) self._canvas.bind('<Shift-B1-Motion>', self._mouse_motion_angular) self._root.bind('<Shift_L>', self._change_to_motion_angular) self._root.bind('<KeyRelease-Shift_L>', self._change_to_motion_linear) else: self._canvas.bind('<B1-Motion>', self._mouse_motion_angular) self._canvas.pack() # If frequency is positive, use synchronous publishing mode: if self._frequency > 0.0: # Create timer for the given frequency to publish the twist: period = rospy.Duration(1.0 / self._frequency) self._timer = rospy.Timer(period, self._publish_twist) # Start window event manager main loop: self._root.mainloop() def __del__(self): if self._frequency > 0.0: self._timer.shutdown() self._root.quit() def _start(self, event): self._x, self._y = event.y, event.x self._y_linear = self._y_angular = 0 self._v_x = self._v_y = self._w = 0.0 def _release(self, event): self._v_x = self._v_y = self._w = 0.0 self._send_motion() def _configure(self, event): self._width, self._height = event.height, event.width self._c_x = self._height / 2.0 self._c_y = self._width / 2.0 self._r = min(self._height, self._width) * 0.25 def _mouse_motion_linear(self, event): self._v_x, self._v_y = self._relative_motion(event.y, event.x) self._send_motion() def _mouse_motion_angular(self, event): self._v_x, self._w = self._relative_motion(event.y, event.x) self._send_motion() def _update_coords(self, tag, x0, y0, x1, y1): x0 += self._c_x y0 += self._c_y x1 += self._c_x y1 += self._c_y self._canvas.coords(tag, (x0, y0, x1, y1)) def _draw_v_x(self, v): x = -v * float(self._width) self._update_coords('v_x', 0, 0, 0, x) def _draw_v_y(self, v): y = 
-v * float(self._height) self._update_coords('v_y', 0, 0, y, 0) def _draw_w(self, w): x0 = y0 = -self._r x1 = y1 = self._r self._update_coords('w', x0, y0, x1, y1) yaw = w * numpy.rad2deg(self._scale) self._canvas.itemconfig('w', extent=yaw) def _send_motion(self): v_x = self._v_x * self._scale v_y = self._v_y * self._scale w = self._w * self._scale linear = Vector3(v_x, v_y, 0.0) angular = Vector3(0.0, 0.0, w) self._draw_v_x(self._v_x) if self._holonomic: self._draw_v_y(self._v_y) self._draw_w(self._w) if self._holonomic: self._text_v_x.set('v_x = %0.2f m/s' % self._v_x) self._text_v_y.set('v_y = %0.2f m/s' % self._v_y) self._text_w.set( 'w = %0.2f deg/s' % numpy.rad2deg(self._w)) else: self._text_v_x.set('v = %0.2f m/s' % self._v_x) self._text_w.set( 'w = %0.2f deg/s' % numpy.rad2deg(self._w)) twist = Twist(linear, angular) self._pub_cmd.publish(twist) def _publish_twist(self, event): self._send_motion() def _relative_motion(self, x, y): dx = self._x - x dy = self._y - y dx /= float(self._width) dy /= float(self._height) dx = max(-1.0, min(dx, 1.0)) dy = max(-1.0, min(dy, 1.0)) return dx, dy def _change_to_motion_linear(self, event): if self._y is not None: y = event.x self._y_angular = self._y - y self._y = self._y_linear + y def _change_to_motion_angular(self, event): if self._y is not None: y = event.x self._y_linear = self._y - y self._y = self._y_angular + y def main(): rospy.init_node('mouse_teleop') MouseTeleop() if __name__ == '__main__': try: main() except rospy.ROSInterruptException: pass
28.884298
78
0.572246
import Tkinter import rospy from geometry_msgs.msg import Twist, Vector3 import numpy class MouseTeleop(): def __init__(self): self._frequency = rospy.get_param('~frequency', 0.0) self._scale = rospy.get_param('~scale', 1.0) self._holonomic = rospy.get_param('~holonomic', False) self._pub_cmd = rospy.Publisher('mouse_vel', Twist, queue_size=100) self._v_x = 0.0 self._v_y = 0.0 self._w = 0.0 # when the mouse button is pressed on the _start callback that handles # that event: self._x = None self._y = None # Create window: self._root = Tkinter.Tk() self._root.title('Mouse Teleop') # Make window non-resizable: self._root.resizable(0, 0) # Create canvas: self._canvas = Tkinter.Canvas(self._root, bg='white') # Create canvas objects: self._canvas.create_arc(0, 0, 0, 0, fill='red', outline='red', width=1, style=Tkinter.PIESLICE, start=90.0, tag='w') self._canvas.create_line(0, 0, 0, 0, fill='blue', width=4, tag='v_x') if self._holonomic: self._canvas.create_line(0, 0, 0, 0, fill='blue', width=4, tag='v_y') # Create canvas text objects: self._text_v_x = Tkinter.StringVar() if self._holonomic: self._text_v_y = Tkinter.StringVar() self._text_w = Tkinter.StringVar() self._label_v_x = Tkinter.Label(self._root, anchor=Tkinter.W, textvariable=self._text_v_x) if self._holonomic: self._label_v_y = Tkinter.Label(self._root, anchor=Tkinter.W, textvariable=self._text_v_y) self._label_w = Tkinter.Label(self._root, anchor=Tkinter.W, textvariable=self._text_w) if self._holonomic: self._text_v_x.set('v_x = %0.2f m/s' % self._v_x) self._text_v_y.set('v_y = %0.2f m/s' % self._v_y) self._text_w.set( 'w = %0.2f deg/s' % self._w) else: self._text_v_x.set('v = %0.2f m/s' % self._v_x) self._text_w.set( 'w = %0.2f deg/s' % self._w) self._label_v_x.pack() if self._holonomic: self._label_v_y.pack() self._label_w.pack() # Bind event handlers: self._canvas.bind('<Button-1>', self._start) self._canvas.bind('<ButtonRelease-1>', self._release) self._canvas.bind('<Configure>', self._configure) if 
self._holonomic: self._canvas.bind('<B1-Motion>', self._mouse_motion_linear) self._canvas.bind('<Shift-B1-Motion>', self._mouse_motion_angular) self._root.bind('<Shift_L>', self._change_to_motion_angular) self._root.bind('<KeyRelease-Shift_L>', self._change_to_motion_linear) else: self._canvas.bind('<B1-Motion>', self._mouse_motion_angular) self._canvas.pack() # If frequency is positive, use synchronous publishing mode: if self._frequency > 0.0: # Create timer for the given frequency to publish the twist: period = rospy.Duration(1.0 / self._frequency) self._timer = rospy.Timer(period, self._publish_twist) # Start window event manager main loop: self._root.mainloop() def __del__(self): if self._frequency > 0.0: self._timer.shutdown() self._root.quit() def _start(self, event): self._x, self._y = event.y, event.x self._y_linear = self._y_angular = 0 self._v_x = self._v_y = self._w = 0.0 def _release(self, event): self._v_x = self._v_y = self._w = 0.0 self._send_motion() def _configure(self, event): self._width, self._height = event.height, event.width self._c_x = self._height / 2.0 self._c_y = self._width / 2.0 self._r = min(self._height, self._width) * 0.25 def _mouse_motion_linear(self, event): self._v_x, self._v_y = self._relative_motion(event.y, event.x) self._send_motion() def _mouse_motion_angular(self, event): self._v_x, self._w = self._relative_motion(event.y, event.x) self._send_motion() def _update_coords(self, tag, x0, y0, x1, y1): x0 += self._c_x y0 += self._c_y x1 += self._c_x y1 += self._c_y self._canvas.coords(tag, (x0, y0, x1, y1)) def _draw_v_x(self, v): x = -v * float(self._width) self._update_coords('v_x', 0, 0, 0, x) def _draw_v_y(self, v): y = -v * float(self._height) self._update_coords('v_y', 0, 0, y, 0) def _draw_w(self, w): x0 = y0 = -self._r x1 = y1 = self._r self._update_coords('w', x0, y0, x1, y1) yaw = w * numpy.rad2deg(self._scale) self._canvas.itemconfig('w', extent=yaw) def _send_motion(self): v_x = self._v_x * self._scale v_y = 
self._v_y * self._scale w = self._w * self._scale linear = Vector3(v_x, v_y, 0.0) angular = Vector3(0.0, 0.0, w) self._draw_v_x(self._v_x) if self._holonomic: self._draw_v_y(self._v_y) self._draw_w(self._w) if self._holonomic: self._text_v_x.set('v_x = %0.2f m/s' % self._v_x) self._text_v_y.set('v_y = %0.2f m/s' % self._v_y) self._text_w.set( 'w = %0.2f deg/s' % numpy.rad2deg(self._w)) else: self._text_v_x.set('v = %0.2f m/s' % self._v_x) self._text_w.set( 'w = %0.2f deg/s' % numpy.rad2deg(self._w)) twist = Twist(linear, angular) self._pub_cmd.publish(twist) def _publish_twist(self, event): self._send_motion() def _relative_motion(self, x, y): dx = self._x - x dy = self._y - y dx /= float(self._width) dy /= float(self._height) dx = max(-1.0, min(dx, 1.0)) dy = max(-1.0, min(dy, 1.0)) return dx, dy def _change_to_motion_linear(self, event): if self._y is not None: y = event.x self._y_angular = self._y - y self._y = self._y_linear + y def _change_to_motion_angular(self, event): if self._y is not None: y = event.x self._y_linear = self._y - y self._y = self._y_angular + y def main(): rospy.init_node('mouse_teleop') MouseTeleop() if __name__ == '__main__': try: main() except rospy.ROSInterruptException: pass
true
true
f7040c7d07a67dcc16f387e658425ea49720e40f
10,718
py
Python
tests/image/test_segmentation.py
dnjst/squidpy
ca765d04b9621debb8752d3d4693dd68f6909513
[ "BSD-3-Clause" ]
161
2021-02-15T15:14:22.000Z
2022-03-30T10:06:06.000Z
tests/image/test_segmentation.py
dnjst/squidpy
ca765d04b9621debb8752d3d4693dd68f6909513
[ "BSD-3-Clause" ]
214
2021-02-14T18:20:37.000Z
2022-03-31T18:23:41.000Z
tests/image/test_segmentation.py
dnjst/squidpy
ca765d04b9621debb8752d3d4693dd68f6909513
[ "BSD-3-Clause" ]
36
2021-02-14T18:46:52.000Z
2022-03-17T04:25:37.000Z
from typing import Tuple, Union, Callable, Optional, Sequence from pytest_mock import MockerFixture import pytest import numpy as np import dask.array as da from squidpy.im import ( segment, ImageContainer, SegmentationCustom, SegmentationWatershed, ) from squidpy.im._segment import _SEG_DTYPE from squidpy._constants._constants import SegmentationBackend from squidpy._constants._pkg_constants import Key def dummy_segment(arr: np.ndarray) -> np.ndarray: assert isinstance(arr, np.ndarray) assert arr.ndim == 3 return arr[..., 0].astype(np.uint32) class TestGeneral: @pytest.mark.parametrize("ndim", [2, 3]) def test_input_ndim(self, ndim: int): img = np.zeros(shape=(10, 10)) if ndim == 3: img = img[..., np.newaxis] sc = SegmentationCustom(dummy_segment) res = sc.segment(img) assert isinstance(res, np.ndarray) assert res.ndim == 3 if ndim == 2: assert res.shape == img.shape + (1,) else: assert res.shape == img.shape def test_segment_invalid_shape(self): img = np.zeros(shape=(1, 10, 10, 2)) sc = SegmentationCustom(dummy_segment) with pytest.raises(ValueError, match=r"Expected `2` or `3` dimensions"): sc.segment(img) def test_segment_container(self): img = ImageContainer(np.zeros(shape=(10, 10, 1)), layer="image") sc = SegmentationCustom(dummy_segment) res = sc.segment(img, layer="image", library_id=img["image"].z.values[0]) assert isinstance(res, ImageContainer) assert res.shape == img.shape assert "image" in res assert res["image"].dims == img["image"].dims class TestWatershed: @pytest.mark.parametrize("thresh", [None, 0.1, 0.5, 1.0]) def test_threshold(self, thresh: Optional[float], mocker: MockerFixture): img = np.zeros((100, 200), dtype=np.float64) img[2:10, 2:10] = 1.0 img[30:34, 10:16] = 1.0 img = ImageContainer(img, layer="image") sw = SegmentationWatershed() spy = mocker.spy(sw, "_segment") res = sw.segment(img, layer="image", library_id=img["image"].z.values[0], fn_kwargs={"thresh": thresh}) assert isinstance(res, ImageContainer) spy.assert_called_once() call = 
spy.call_args_list[0] assert call[1]["thresh"] == thresh class TestHighLevel: def test_invalid_layer(self, small_cont: ImageContainer): with pytest.raises(KeyError, match=r"Image layer `foobar` not found in"): segment(small_cont, layer="foobar") @pytest.mark.parametrize("method", ["watershed", dummy_segment]) def test_method(self, small_cont: ImageContainer, method: Union[str, Callable]): res = segment(small_cont, method=method, copy=True) assert isinstance(res, ImageContainer) assert res.shape == small_cont.shape if callable(method): method = SegmentationBackend.CUSTOM.s assert Key.img.segment(method) in res if method in ("log", "dog", "dog"): assert res[Key.img.segment(method)].values.max() <= 1 @pytest.mark.parametrize("dy", [11, 0.5, None]) @pytest.mark.parametrize("dx", [15, 0.1, None]) def test_size(self, small_cont: ImageContainer, dy: Optional[Union[int, float]], dx: Optional[Union[int, float]]): res = segment(small_cont, size=(dy, dx), copy=True) assert isinstance(res, ImageContainer) assert res.shape == small_cont.shape @pytest.mark.parametrize("channel", [0, 1, 2]) def test_channel(self, small_cont: ImageContainer, channel: int): segment(small_cont, copy=False, layer="image", channel=channel) assert Key.img.segment("watershed") in small_cont np.testing.assert_array_equal( list(small_cont[Key.img.segment("watershed")].dims), ["y", "x", "z", f"{small_cont['image'].dims[-1]}:{channel}"], ) def test_all_channels(self, small_cont: ImageContainer): def func(arr: np.ndarray): assert arr.shape == (small_cont.shape + (n_channels,)) return np.zeros(arr.shape[:2], dtype=np.uint8) n_channels = small_cont["image"].sizes["channels"] segment(small_cont, copy=False, layer="image", channel=None, method=func, layer_added="seg") np.testing.assert_array_equal(small_cont["seg"], np.zeros(small_cont.shape + (1, 1))) assert small_cont["seg"].dtype == _SEG_DTYPE @pytest.mark.parametrize("key_added", [None, "foo"]) def test_key_added(self, small_cont: ImageContainer, key_added: 
Optional[str]): res = segment(small_cont, copy=False, layer="image", layer_added=key_added) assert res is None assert Key.img.segment("watershed", layer_added=key_added) in small_cont def test_passing_kwargs(self, small_cont: ImageContainer): def func(chunk: np.ndarray, sentinel: bool = False): assert sentinel, "Sentinel not set." return np.zeros(chunk[..., 0].shape, dtype=_SEG_DTYPE) segment( small_cont, method=func, layer="image", layer_added="bar", chunks=25, lazy=False, depth=None, sentinel=True ) assert small_cont["bar"].values.dtype == _SEG_DTYPE np.testing.assert_array_equal(small_cont["bar"].values, 0) @pytest.mark.parametrize("dask_input", [False, True]) @pytest.mark.parametrize("chunks", [25, (50, 50, 1), "auto"]) @pytest.mark.parametrize("lazy", [False, True]) def test_dask_segment( self, small_cont: ImageContainer, dask_input: bool, chunks: Union[int, Tuple[int, ...], str], lazy: bool ): def func(chunk: np.ndarray): if isinstance(chunks, tuple): np.testing.assert_array_equal(chunk.shape, [chunks[0] + 2 * d, chunks[1] + 2 * d, 1]) elif isinstance(chunks, int): np.testing.assert_array_equal(chunk.shape, [chunks + 2 * d, chunks + 2 * d, 1]) return np.zeros(chunk[..., 0].shape, dtype=_SEG_DTYPE) small_cont["foo"] = da.asarray(small_cont["image"].data) if dask_input else small_cont["image"].values d = 10 # overlap depth assert isinstance(small_cont["foo"].data, da.Array if dask_input else np.ndarray) segment(small_cont, method=func, layer="foo", layer_added="bar", chunks=chunks, lazy=lazy, depth={0: d, 1: d}) if lazy: assert isinstance(small_cont["bar"].data, da.Array) small_cont.compute() assert isinstance(small_cont["foo"].data, np.ndarray) else: # make sure we didn't accidentally trigger foo's computation assert isinstance(small_cont["foo"].data, da.Array if dask_input else np.ndarray) assert isinstance(small_cont["bar"].data, np.ndarray) assert small_cont["bar"].values.dtype == _SEG_DTYPE np.testing.assert_array_equal(small_cont["bar"].values, 0) def 
test_copy(self, small_cont: ImageContainer): prev_keys = set(small_cont) res = segment(small_cont, copy=True, layer="image") assert isinstance(res, ImageContainer) assert set(small_cont) == prev_keys assert Key.img.segment("watershed") in res def test_parallelize(self, small_cont: ImageContainer): res1 = segment(small_cont, layer="image", n_jobs=1, copy=True) res2 = segment(small_cont, layer="image", n_jobs=2, copy=True) np.testing.assert_array_equal( res1[Key.img.segment("watershed")].values, res2[Key.img.segment("watershed")].values ) @pytest.mark.parametrize("chunks", [25, 50]) def test_blocking(self, small_cont: ImageContainer, chunks: int): def func(chunk: np.ndarray): labels = np.zeros(chunk[..., 0].shape, dtype=np.uint32) labels[0, 0] = 1 return labels segment(small_cont, method=func, layer="image", layer_added="bar", chunks=chunks, lazy=False, depth=None) # blocks are label from top-left to bottom-right in an ascending order [0, num_blocks - 1] # lowest n bits are allocated for block, rest is for the label (i.e. 
for blocksize=25, we need 16 blocks ids # from [0, 15], which can be stored in 4 bits, then we just prepend 1 bit (see the above `func`, resulting # in unique 16 labels [10000, 11111] expected = np.zeros_like(small_cont["bar"].values) start = 16 if chunks == 25 else 4 for i in range(0, 100, chunks): for j in range(0, 100, chunks): expected[i, j] = start start += 1 assert small_cont["bar"].values.dtype == _SEG_DTYPE np.testing.assert_array_equal(small_cont["bar"].values, expected) @pytest.mark.parametrize("size", [None, 11]) def test_watershed_works(self, size: Optional[int]): img_orig = np.zeros((100, 200, 30), dtype=np.float64) img_orig[2:10, 2:10] = 1.0 img_orig[30:34, 10:16] = 1.0 cont = ImageContainer(img_orig, layer="image_0") segment( img=cont, method="watershed", layer="image_0", layer_added="segment", size=size, channel=0, thresh=0.5, ) # check that blobs are in segments assert np.mean(cont.data["segment"].values[img_orig[:, :, 0] > 0] > 0) > 0.5 # for size=10, "fails with `size=10` due to border effects" # the reason why there is no test for it that inside tox, it "works" (i.e. 
the assertion passes) # but outside, the assertion fails, as it should @pytest.mark.parametrize("library_id", [None, "3", ["1", "2"]]) def test_library_id(self, cont_4d: ImageContainer, library_id: Optional[Union[str, Sequence[str]]]): def func(arr: np.ndarray): assert arr.shape == cont_4d.shape + (1,) return np.ones(arr[..., 0].shape, dtype=_SEG_DTYPE) segment(cont_4d, method=func, layer="image", layer_added="image_seg", library_id=library_id, copy=False) np.testing.assert_array_equal(cont_4d["image"].coords, cont_4d["image_seg"].coords) if library_id is None: np.testing.assert_array_equal(1, cont_4d["image_seg"]) else: if isinstance(library_id, str): library_id = [library_id] for lid in library_id: np.testing.assert_array_equal(1, cont_4d["image_seg"].sel(z=lid)) for lid in set(cont_4d.library_ids) - set(library_id): # channels have been changed, apply sets to 0 np.testing.assert_array_equal(0, cont_4d["image_seg"].sel(z=lid))
41.065134
119
0.630528
from typing import Tuple, Union, Callable, Optional, Sequence from pytest_mock import MockerFixture import pytest import numpy as np import dask.array as da from squidpy.im import ( segment, ImageContainer, SegmentationCustom, SegmentationWatershed, ) from squidpy.im._segment import _SEG_DTYPE from squidpy._constants._constants import SegmentationBackend from squidpy._constants._pkg_constants import Key def dummy_segment(arr: np.ndarray) -> np.ndarray: assert isinstance(arr, np.ndarray) assert arr.ndim == 3 return arr[..., 0].astype(np.uint32) class TestGeneral: @pytest.mark.parametrize("ndim", [2, 3]) def test_input_ndim(self, ndim: int): img = np.zeros(shape=(10, 10)) if ndim == 3: img = img[..., np.newaxis] sc = SegmentationCustom(dummy_segment) res = sc.segment(img) assert isinstance(res, np.ndarray) assert res.ndim == 3 if ndim == 2: assert res.shape == img.shape + (1,) else: assert res.shape == img.shape def test_segment_invalid_shape(self): img = np.zeros(shape=(1, 10, 10, 2)) sc = SegmentationCustom(dummy_segment) with pytest.raises(ValueError, match=r"Expected `2` or `3` dimensions"): sc.segment(img) def test_segment_container(self): img = ImageContainer(np.zeros(shape=(10, 10, 1)), layer="image") sc = SegmentationCustom(dummy_segment) res = sc.segment(img, layer="image", library_id=img["image"].z.values[0]) assert isinstance(res, ImageContainer) assert res.shape == img.shape assert "image" in res assert res["image"].dims == img["image"].dims class TestWatershed: @pytest.mark.parametrize("thresh", [None, 0.1, 0.5, 1.0]) def test_threshold(self, thresh: Optional[float], mocker: MockerFixture): img = np.zeros((100, 200), dtype=np.float64) img[2:10, 2:10] = 1.0 img[30:34, 10:16] = 1.0 img = ImageContainer(img, layer="image") sw = SegmentationWatershed() spy = mocker.spy(sw, "_segment") res = sw.segment(img, layer="image", library_id=img["image"].z.values[0], fn_kwargs={"thresh": thresh}) assert isinstance(res, ImageContainer) spy.assert_called_once() call = 
spy.call_args_list[0] assert call[1]["thresh"] == thresh class TestHighLevel: def test_invalid_layer(self, small_cont: ImageContainer): with pytest.raises(KeyError, match=r"Image layer `foobar` not found in"): segment(small_cont, layer="foobar") @pytest.mark.parametrize("method", ["watershed", dummy_segment]) def test_method(self, small_cont: ImageContainer, method: Union[str, Callable]): res = segment(small_cont, method=method, copy=True) assert isinstance(res, ImageContainer) assert res.shape == small_cont.shape if callable(method): method = SegmentationBackend.CUSTOM.s assert Key.img.segment(method) in res if method in ("log", "dog", "dog"): assert res[Key.img.segment(method)].values.max() <= 1 @pytest.mark.parametrize("dy", [11, 0.5, None]) @pytest.mark.parametrize("dx", [15, 0.1, None]) def test_size(self, small_cont: ImageContainer, dy: Optional[Union[int, float]], dx: Optional[Union[int, float]]): res = segment(small_cont, size=(dy, dx), copy=True) assert isinstance(res, ImageContainer) assert res.shape == small_cont.shape @pytest.mark.parametrize("channel", [0, 1, 2]) def test_channel(self, small_cont: ImageContainer, channel: int): segment(small_cont, copy=False, layer="image", channel=channel) assert Key.img.segment("watershed") in small_cont np.testing.assert_array_equal( list(small_cont[Key.img.segment("watershed")].dims), ["y", "x", "z", f"{small_cont['image'].dims[-1]}:{channel}"], ) def test_all_channels(self, small_cont: ImageContainer): def func(arr: np.ndarray): assert arr.shape == (small_cont.shape + (n_channels,)) return np.zeros(arr.shape[:2], dtype=np.uint8) n_channels = small_cont["image"].sizes["channels"] segment(small_cont, copy=False, layer="image", channel=None, method=func, layer_added="seg") np.testing.assert_array_equal(small_cont["seg"], np.zeros(small_cont.shape + (1, 1))) assert small_cont["seg"].dtype == _SEG_DTYPE @pytest.mark.parametrize("key_added", [None, "foo"]) def test_key_added(self, small_cont: ImageContainer, key_added: 
Optional[str]): res = segment(small_cont, copy=False, layer="image", layer_added=key_added) assert res is None assert Key.img.segment("watershed", layer_added=key_added) in small_cont def test_passing_kwargs(self, small_cont: ImageContainer): def func(chunk: np.ndarray, sentinel: bool = False): assert sentinel, "Sentinel not set." return np.zeros(chunk[..., 0].shape, dtype=_SEG_DTYPE) segment( small_cont, method=func, layer="image", layer_added="bar", chunks=25, lazy=False, depth=None, sentinel=True ) assert small_cont["bar"].values.dtype == _SEG_DTYPE np.testing.assert_array_equal(small_cont["bar"].values, 0) @pytest.mark.parametrize("dask_input", [False, True]) @pytest.mark.parametrize("chunks", [25, (50, 50, 1), "auto"]) @pytest.mark.parametrize("lazy", [False, True]) def test_dask_segment( self, small_cont: ImageContainer, dask_input: bool, chunks: Union[int, Tuple[int, ...], str], lazy: bool ): def func(chunk: np.ndarray): if isinstance(chunks, tuple): np.testing.assert_array_equal(chunk.shape, [chunks[0] + 2 * d, chunks[1] + 2 * d, 1]) elif isinstance(chunks, int): np.testing.assert_array_equal(chunk.shape, [chunks + 2 * d, chunks + 2 * d, 1]) return np.zeros(chunk[..., 0].shape, dtype=_SEG_DTYPE) small_cont["foo"] = da.asarray(small_cont["image"].data) if dask_input else small_cont["image"].values d = 10 assert isinstance(small_cont["foo"].data, da.Array if dask_input else np.ndarray) segment(small_cont, method=func, layer="foo", layer_added="bar", chunks=chunks, lazy=lazy, depth={0: d, 1: d}) if lazy: assert isinstance(small_cont["bar"].data, da.Array) small_cont.compute() assert isinstance(small_cont["foo"].data, np.ndarray) else: assert isinstance(small_cont["foo"].data, da.Array if dask_input else np.ndarray) assert isinstance(small_cont["bar"].data, np.ndarray) assert small_cont["bar"].values.dtype == _SEG_DTYPE np.testing.assert_array_equal(small_cont["bar"].values, 0) def test_copy(self, small_cont: ImageContainer): prev_keys = set(small_cont) res = 
segment(small_cont, copy=True, layer="image") assert isinstance(res, ImageContainer) assert set(small_cont) == prev_keys assert Key.img.segment("watershed") in res def test_parallelize(self, small_cont: ImageContainer): res1 = segment(small_cont, layer="image", n_jobs=1, copy=True) res2 = segment(small_cont, layer="image", n_jobs=2, copy=True) np.testing.assert_array_equal( res1[Key.img.segment("watershed")].values, res2[Key.img.segment("watershed")].values ) @pytest.mark.parametrize("chunks", [25, 50]) def test_blocking(self, small_cont: ImageContainer, chunks: int): def func(chunk: np.ndarray): labels = np.zeros(chunk[..., 0].shape, dtype=np.uint32) labels[0, 0] = 1 return labels segment(small_cont, method=func, layer="image", layer_added="bar", chunks=chunks, lazy=False, depth=None) expected = np.zeros_like(small_cont["bar"].values) start = 16 if chunks == 25 else 4 for i in range(0, 100, chunks): for j in range(0, 100, chunks): expected[i, j] = start start += 1 assert small_cont["bar"].values.dtype == _SEG_DTYPE np.testing.assert_array_equal(small_cont["bar"].values, expected) @pytest.mark.parametrize("size", [None, 11]) def test_watershed_works(self, size: Optional[int]): img_orig = np.zeros((100, 200, 30), dtype=np.float64) img_orig[2:10, 2:10] = 1.0 img_orig[30:34, 10:16] = 1.0 cont = ImageContainer(img_orig, layer="image_0") segment( img=cont, method="watershed", layer="image_0", layer_added="segment", size=size, channel=0, thresh=0.5, ) assert np.mean(cont.data["segment"].values[img_orig[:, :, 0] > 0] > 0) > 0.5 @pytest.mark.parametrize("library_id", [None, "3", ["1", "2"]]) def test_library_id(self, cont_4d: ImageContainer, library_id: Optional[Union[str, Sequence[str]]]): def func(arr: np.ndarray): assert arr.shape == cont_4d.shape + (1,) return np.ones(arr[..., 0].shape, dtype=_SEG_DTYPE) segment(cont_4d, method=func, layer="image", layer_added="image_seg", library_id=library_id, copy=False) np.testing.assert_array_equal(cont_4d["image"].coords, 
cont_4d["image_seg"].coords) if library_id is None: np.testing.assert_array_equal(1, cont_4d["image_seg"]) else: if isinstance(library_id, str): library_id = [library_id] for lid in library_id: np.testing.assert_array_equal(1, cont_4d["image_seg"].sel(z=lid)) for lid in set(cont_4d.library_ids) - set(library_id): np.testing.assert_array_equal(0, cont_4d["image_seg"].sel(z=lid))
true
true
f7040cb7f34ead1a89da8de11b06d15adf9b3e85
6,381
py
Python
restler-quick-start.py
mkleshchenok/restler-fuzzer
1bd7bc68a6c4de997e9fda9a9db5ffb0504b864c
[ "MIT" ]
null
null
null
restler-quick-start.py
mkleshchenok/restler-fuzzer
1bd7bc68a6c4de997e9fda9a9db5ffb0504b864c
[ "MIT" ]
null
null
null
restler-quick-start.py
mkleshchenok/restler-fuzzer
1bd7bc68a6c4de997e9fda9a9db5ffb0504b864c
[ "MIT" ]
null
null
null
# Copyright (c) Microsoft Corporation. # Licensed under the MIT License. import argparse import contextlib import os import subprocess from pathlib import Path RESTLER_TEMP_DIR = 'restler_working_dir' @contextlib.contextmanager def usedir(dir): """ Helper for 'with' statements that changes the current directory to @dir and then changes the directory back to its original once the 'with' ends. Can be thought of like pushd with an auto popd after the 'with' scope ends """ curr = os.getcwd() os.chdir(dir) try: yield finally: os.chdir(curr) def compile_spec(api_spec_path, restler_dll_path): """ Compiles a specified api spec @param api_spec_path: The absolute path to the Swagger file to compile @type api_spec_path: Str @param restler_dll_path: The absolute path to the RESTler driver's dll @type restler_dll_path: Str @return: None @rtype : None """ if not os.path.exists(RESTLER_TEMP_DIR): os.makedirs(RESTLER_TEMP_DIR) with usedir(RESTLER_TEMP_DIR): command=f"dotnet \"{restler_dll_path}\" compile --api_spec \"{api_spec_path}\"" print(f"command: {command}") subprocess.run(command, shell=True) def add_common_settings(ip, port, host, use_ssl, command): if not use_ssl: command = f"{command} --no_ssl" if ip is not None: command = f"{command} --target_ip {ip}" if port is not None: command = f"{command} --target_port {port}" if host is not None: command = f"{command} --host {host}" return command def replay_bug(ip, port, host, use_ssl, restler_dll_path, replay_log): """ Runs RESTler's replay mode on the specified replay file """ with usedir(RESTLER_TEMP_DIR): command = ( f"dotnet \"{restler_dll_path}\" replay --replay_log \"{replay_log}\"" ) command = add_common_settings(ip, port, host, use_ssl, command) print(f"command: {command}\n") subprocess.run(command, shell=True) def replay_from_dir(ip, port, host, use_ssl, restler_dll_path, replay_dir): import glob from pathlib import Path # get all the 500 replay files in the bug buckets directory bug_buckets = 
glob.glob(os.path.join(replay_dir, 'RestlerResults', '**/bug_buckets/*500*')) print(f"buckets: {bug_buckets}") for file_path in bug_buckets: if "bug_buckets" in os.path.basename(file_path): continue print(f"Testing replay file: {file_path}") replay_bug(ip, port, host, use_ssl, restler_dll_path, Path(file_path).absolute()) pass def test_spec(ip, port, host, use_ssl, restler_dll_path, task): """ Runs RESTler's test mode on a specified Compile directory @param ip: The IP of the service to test @type ip: Str @param port: The port of the service to test @type port: Str @param host: The hostname of the service to test @type host: Str @param use_ssl: If False, set the --no_ssl parameter when executing RESTler @type use_ssl: Boolean @param restler_dll_path: The absolute path to the RESTler driver's dll @type restler_dll_path: Str @return: None @rtype : None """ import json with usedir(RESTLER_TEMP_DIR): compile_dir = Path(f'Compile') grammar_file_path = compile_dir.joinpath('grammar.py') dictionary_file_path = compile_dir.joinpath('dict.json') settings_file_path = compile_dir.joinpath('engine_settings.json') command = ( f"dotnet \"{restler_dll_path}\" {task} --grammar_file \"{grammar_file_path}\" --dictionary_file \"{dictionary_file_path}\"" f" --settings \"{settings_file_path}\"" ) print(f"command: {command}\n") command = add_common_settings(ip, port, host, use_ssl, command) subprocess.run(command, shell=True) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--api_spec_path', help='The API Swagger specification to compile and test', type=str, required=False, default=None) parser.add_argument('--ip', help='The IP of the service to test', type=str, required=False, default=None) parser.add_argument('--port', help='The port of the service to test', type=str, required=False, default=None) parser.add_argument('--restler_drop_dir', help="The path to the RESTler drop", type=str, required=True) parser.add_argument('--use_ssl', help='Set this flag 
if you want to use SSL validation for the socket', action='store_true') parser.add_argument('--host', help='The hostname of the service to test', type=str, required=False, default=None) parser.add_argument('--task', help='The task to run (test, fuzz-lean, fuzz, or replay)' 'For test, fuzz-lean, and fuzz, the spec is compiled first.' 'For replay, bug buckets from the specified task directory are re-played.', type=str, required=False, default='test') parser.add_argument('--replay_bug_buckets_dir', help='For the replay task, specifies the directory in which to search for bug buckets.', type=str, required=False, default=None) args = parser.parse_args() restler_dll_path = Path(os.path.abspath(args.restler_drop_dir)).joinpath('restler', 'Restler.dll') print(f"\nrestler_dll_path: {restler_dll_path}\n") if args.task == "replay": replay_from_dir(args.ip, args.port, args.host, args.use_ssl, restler_dll_path.absolute(), args.replay_bug_buckets_dir) else: if args.api_spec_path is None: print("api_spec_path is required for all tasks except the replay task.") exit(-1) api_spec_path = os.path.abspath(args.api_spec_path) compile_spec(api_spec_path, restler_dll_path.absolute()) test_spec(args.ip, args.port, args.host, args.use_ssl, restler_dll_path.absolute(), args.task) print(f"Test complete.\nSee {os.path.abspath(RESTLER_TEMP_DIR)} for results.")
40.132075
135
0.640025
import argparse import contextlib import os import subprocess from pathlib import Path RESTLER_TEMP_DIR = 'restler_working_dir' @contextlib.contextmanager def usedir(dir): curr = os.getcwd() os.chdir(dir) try: yield finally: os.chdir(curr) def compile_spec(api_spec_path, restler_dll_path): if not os.path.exists(RESTLER_TEMP_DIR): os.makedirs(RESTLER_TEMP_DIR) with usedir(RESTLER_TEMP_DIR): command=f"dotnet \"{restler_dll_path}\" compile --api_spec \"{api_spec_path}\"" print(f"command: {command}") subprocess.run(command, shell=True) def add_common_settings(ip, port, host, use_ssl, command): if not use_ssl: command = f"{command} --no_ssl" if ip is not None: command = f"{command} --target_ip {ip}" if port is not None: command = f"{command} --target_port {port}" if host is not None: command = f"{command} --host {host}" return command def replay_bug(ip, port, host, use_ssl, restler_dll_path, replay_log): with usedir(RESTLER_TEMP_DIR): command = ( f"dotnet \"{restler_dll_path}\" replay --replay_log \"{replay_log}\"" ) command = add_common_settings(ip, port, host, use_ssl, command) print(f"command: {command}\n") subprocess.run(command, shell=True) def replay_from_dir(ip, port, host, use_ssl, restler_dll_path, replay_dir): import glob from pathlib import Path bug_buckets = glob.glob(os.path.join(replay_dir, 'RestlerResults', '**/bug_buckets/*500*')) print(f"buckets: {bug_buckets}") for file_path in bug_buckets: if "bug_buckets" in os.path.basename(file_path): continue print(f"Testing replay file: {file_path}") replay_bug(ip, port, host, use_ssl, restler_dll_path, Path(file_path).absolute()) pass def test_spec(ip, port, host, use_ssl, restler_dll_path, task): import json with usedir(RESTLER_TEMP_DIR): compile_dir = Path(f'Compile') grammar_file_path = compile_dir.joinpath('grammar.py') dictionary_file_path = compile_dir.joinpath('dict.json') settings_file_path = compile_dir.joinpath('engine_settings.json') command = ( f"dotnet \"{restler_dll_path}\" {task} --grammar_file 
\"{grammar_file_path}\" --dictionary_file \"{dictionary_file_path}\"" f" --settings \"{settings_file_path}\"" ) print(f"command: {command}\n") command = add_common_settings(ip, port, host, use_ssl, command) subprocess.run(command, shell=True) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--api_spec_path', help='The API Swagger specification to compile and test', type=str, required=False, default=None) parser.add_argument('--ip', help='The IP of the service to test', type=str, required=False, default=None) parser.add_argument('--port', help='The port of the service to test', type=str, required=False, default=None) parser.add_argument('--restler_drop_dir', help="The path to the RESTler drop", type=str, required=True) parser.add_argument('--use_ssl', help='Set this flag if you want to use SSL validation for the socket', action='store_true') parser.add_argument('--host', help='The hostname of the service to test', type=str, required=False, default=None) parser.add_argument('--task', help='The task to run (test, fuzz-lean, fuzz, or replay)' 'For test, fuzz-lean, and fuzz, the spec is compiled first.' 
'For replay, bug buckets from the specified task directory are re-played.', type=str, required=False, default='test') parser.add_argument('--replay_bug_buckets_dir', help='For the replay task, specifies the directory in which to search for bug buckets.', type=str, required=False, default=None) args = parser.parse_args() restler_dll_path = Path(os.path.abspath(args.restler_drop_dir)).joinpath('restler', 'Restler.dll') print(f"\nrestler_dll_path: {restler_dll_path}\n") if args.task == "replay": replay_from_dir(args.ip, args.port, args.host, args.use_ssl, restler_dll_path.absolute(), args.replay_bug_buckets_dir) else: if args.api_spec_path is None: print("api_spec_path is required for all tasks except the replay task.") exit(-1) api_spec_path = os.path.abspath(args.api_spec_path) compile_spec(api_spec_path, restler_dll_path.absolute()) test_spec(args.ip, args.port, args.host, args.use_ssl, restler_dll_path.absolute(), args.task) print(f"Test complete.\nSee {os.path.abspath(RESTLER_TEMP_DIR)} for results.")
true
true
f7040db7824984867af73d9593ba82caf858530e
5,146
py
Python
src/pretalx/common/views.py
martinheidegger/pretalx
d812e665c1c5ce29df3eafc1985af08e4d986fef
[ "Apache-2.0" ]
null
null
null
src/pretalx/common/views.py
martinheidegger/pretalx
d812e665c1c5ce29df3eafc1985af08e4d986fef
[ "Apache-2.0" ]
null
null
null
src/pretalx/common/views.py
martinheidegger/pretalx
d812e665c1c5ce29df3eafc1985af08e4d986fef
[ "Apache-2.0" ]
null
null
null
import urllib from contextlib import suppress from django.conf import settings from django.contrib import messages from django.contrib.auth import login from django.core.exceptions import PermissionDenied, SuspiciousOperation from django.http import FileResponse, Http404, HttpResponseServerError from django.shortcuts import redirect from django.template import TemplateDoesNotExist, loader from django.urls import get_callable from django.utils.http import url_has_allowed_host_and_scheme from django.utils.timezone import now from django.views.generic import FormView from django.views.generic.detail import SingleObjectTemplateResponseMixin from django.views.generic.edit import ModelFormMixin, ProcessFormView from django_context_decorator import context from pretalx.cfp.forms.auth import ResetForm from pretalx.common.mail import SendMailException from pretalx.common.phrases import phrases from pretalx.person.forms import UserForm from pretalx.person.models import User class CreateOrUpdateView( SingleObjectTemplateResponseMixin, ModelFormMixin, ProcessFormView ): def set_object(self): if getattr(self, "object", None) is None: setattr(self, "object", None) with suppress(self.model.DoesNotExist, AttributeError): setattr(self, "object", self.get_object()) def get(self, request, *args, **kwargs): self.set_object() return super().get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.set_object() return super().post(request, *args, **kwargs) def is_form_bound(request, form_name, form_param="form"): return request.method == "POST" and request.POST.get(form_param) == form_name def get_static(request, path, content_type): # pragma: no cover """TODO: move to staticfiles usage as per https://gist.github.com/SmileyChris/8d472f2a67526e36f39f3c33520182bc This would avoid potential directory traversal by … a malicious urlconfig, so not a huge attack vector.""" path = settings.BASE_DIR / "pretalx/static" / path if not path.exists(): raise Http404() return 
FileResponse( open(path, "rb"), content_type=content_type, as_attachment=False ) class GenericLoginView(FormView): form_class = UserForm @context def password_reset_link(self): return self.get_password_reset_link() def dispatch(self, request, *args, **kwargs): if not self.request.user.is_anonymous: return redirect(self.get_success_url()) return super().dispatch(request, *args, **kwargs) def get_success_url(self): params = self.request.GET.copy() url = urllib.parse.unquote(params.pop("next", [""])[0]) params = "?" + params.urlencode() if params else "" if url and url_has_allowed_host_and_scheme(url, allowed_hosts=None): return url + params return self.success_url + params def form_valid(self, form): pk = form.save() user = User.objects.filter(pk=pk).first() login(self.request, user, backend="django.contrib.auth.backends.ModelBackend") return redirect(self.get_success_url()) class GenericResetView(FormView): form_class = ResetForm def form_valid(self, form): user = form.cleaned_data["user"] if not user or ( user.pw_reset_time and (now() - user.pw_reset_time).total_seconds() < 3600 * 24 ): messages.success(self.request, phrases.cfp.auth_password_reset) return redirect(self.get_success_url()) try: user.reset_password( event=getattr(self.request, "event", None), orga="orga" in self.request.resolver_match.namespaces, ) except SendMailException: # pragma: no cover messages.error(self.request, phrases.base.error_sending_mail) return self.get(self.request, *self.args, **self.kwargs) messages.success(self.request, phrases.cfp.auth_password_reset) user.log_action("pretalx.user.password.reset") return redirect(self.get_success_url()) def handle_500(request): try: template = loader.get_template("500.html") except TemplateDoesNotExist: # pragma: no cover return HttpResponseServerError( "Internal server error. 
Please contact the administrator for details.", content_type="text/html", ) context = {} try: # This should never fail, but can't be too cautious in error views context["request_path"] = urllib.parse.quote(request.path) except Exception: # pragma: no cover pass return HttpResponseServerError(template.render(context)) def error_view(status_code): if status_code == 4031: return get_callable(settings.CSRF_FAILURE_VIEW) if status_code == 500: return handle_500 exceptions = { 400: SuspiciousOperation, 403: PermissionDenied, 404: Http404, } exception = exceptions[status_code] def error_view(request, *args, **kwargs): raise exception return error_view
35.246575
114
0.695103
import urllib from contextlib import suppress from django.conf import settings from django.contrib import messages from django.contrib.auth import login from django.core.exceptions import PermissionDenied, SuspiciousOperation from django.http import FileResponse, Http404, HttpResponseServerError from django.shortcuts import redirect from django.template import TemplateDoesNotExist, loader from django.urls import get_callable from django.utils.http import url_has_allowed_host_and_scheme from django.utils.timezone import now from django.views.generic import FormView from django.views.generic.detail import SingleObjectTemplateResponseMixin from django.views.generic.edit import ModelFormMixin, ProcessFormView from django_context_decorator import context from pretalx.cfp.forms.auth import ResetForm from pretalx.common.mail import SendMailException from pretalx.common.phrases import phrases from pretalx.person.forms import UserForm from pretalx.person.models import User class CreateOrUpdateView( SingleObjectTemplateResponseMixin, ModelFormMixin, ProcessFormView ): def set_object(self): if getattr(self, "object", None) is None: setattr(self, "object", None) with suppress(self.model.DoesNotExist, AttributeError): setattr(self, "object", self.get_object()) def get(self, request, *args, **kwargs): self.set_object() return super().get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.set_object() return super().post(request, *args, **kwargs) def is_form_bound(request, form_name, form_param="form"): return request.method == "POST" and request.POST.get(form_param) == form_name def get_static(request, path, content_type): path = settings.BASE_DIR / "pretalx/static" / path if not path.exists(): raise Http404() return FileResponse( open(path, "rb"), content_type=content_type, as_attachment=False ) class GenericLoginView(FormView): form_class = UserForm @context def password_reset_link(self): return self.get_password_reset_link() def dispatch(self, request, 
*args, **kwargs): if not self.request.user.is_anonymous: return redirect(self.get_success_url()) return super().dispatch(request, *args, **kwargs) def get_success_url(self): params = self.request.GET.copy() url = urllib.parse.unquote(params.pop("next", [""])[0]) params = "?" + params.urlencode() if params else "" if url and url_has_allowed_host_and_scheme(url, allowed_hosts=None): return url + params return self.success_url + params def form_valid(self, form): pk = form.save() user = User.objects.filter(pk=pk).first() login(self.request, user, backend="django.contrib.auth.backends.ModelBackend") return redirect(self.get_success_url()) class GenericResetView(FormView): form_class = ResetForm def form_valid(self, form): user = form.cleaned_data["user"] if not user or ( user.pw_reset_time and (now() - user.pw_reset_time).total_seconds() < 3600 * 24 ): messages.success(self.request, phrases.cfp.auth_password_reset) return redirect(self.get_success_url()) try: user.reset_password( event=getattr(self.request, "event", None), orga="orga" in self.request.resolver_match.namespaces, ) except SendMailException: messages.error(self.request, phrases.base.error_sending_mail) return self.get(self.request, *self.args, **self.kwargs) messages.success(self.request, phrases.cfp.auth_password_reset) user.log_action("pretalx.user.password.reset") return redirect(self.get_success_url()) def handle_500(request): try: template = loader.get_template("500.html") except TemplateDoesNotExist: return HttpResponseServerError( "Internal server error. 
Please contact the administrator for details.", content_type="text/html", ) context = {} try: context["request_path"] = urllib.parse.quote(request.path) except Exception: # pragma: no cover pass return HttpResponseServerError(template.render(context)) def error_view(status_code): if status_code == 4031: return get_callable(settings.CSRF_FAILURE_VIEW) if status_code == 500: return handle_500 exceptions = { 400: SuspiciousOperation, 403: PermissionDenied, 404: Http404, } exception = exceptions[status_code] def error_view(request, *args, **kwargs): raise exception return error_view
true
true
f7040e2527e38d67c7c5bc0c51b708b2619ef854
630
py
Python
companies/migrations/0001_initial.py
pankleshwaria/Django-REST-API
3844234036e3d6906f0ca8656d559be3dd8bcc95
[ "MIT" ]
1
2020-07-16T08:12:27.000Z
2020-07-16T08:12:27.000Z
companies/migrations/0001_initial.py
pankleshwaria/Django-REST-API
3844234036e3d6906f0ca8656d559be3dd8bcc95
[ "MIT" ]
null
null
null
companies/migrations/0001_initial.py
pankleshwaria/Django-REST-API
3844234036e3d6906f0ca8656d559be3dd8bcc95
[ "MIT" ]
null
null
null
# Generated by Django 2.1 on 2019-10-12 09:44 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Stock', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('ticker', models.CharField(max_length=10)), ('open', models.FloatField()), ('close', models.FloatField()), ('volume', models.IntegerField()), ], ), ]
25.2
114
0.544444
from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Stock', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('ticker', models.CharField(max_length=10)), ('open', models.FloatField()), ('close', models.FloatField()), ('volume', models.IntegerField()), ], ), ]
true
true
f7040ee3a06bea6765545d11b6476c3e3b070742
97,839
py
Python
cinder/tests/unit/test_image_utils.py
stackhpc/cinder
93f0ca4dc9eedee10df2f03dad834a31b7f09847
[ "Apache-2.0" ]
null
null
null
cinder/tests/unit/test_image_utils.py
stackhpc/cinder
93f0ca4dc9eedee10df2f03dad834a31b7f09847
[ "Apache-2.0" ]
28
2017-08-17T14:46:05.000Z
2022-03-29T12:42:12.000Z
cinder/tests/unit/test_image_utils.py
alokchandra11/cinder
121d9f512b4a6d1afe6a690effb7c2b379040a7b
[ "Apache-2.0" ]
3
2017-04-27T16:11:40.000Z
2020-02-12T21:27:00.000Z
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Unit tests for image utils.""" import errno import math import cryptography import ddt import mock from oslo_concurrency import processutils from oslo_utils import units from six.moves import builtins from cinder import exception from cinder.image import image_utils from cinder import test from cinder.tests.unit import fake_constants as fake from cinder.volume import throttling class TestQemuImgInfo(test.TestCase): @mock.patch('os.name', new='posix') @mock.patch('oslo_utils.imageutils.QemuImgInfo') @mock.patch('cinder.utils.execute') def test_qemu_img_info(self, mock_exec, mock_info): mock_out = mock.sentinel.out mock_err = mock.sentinel.err test_path = mock.sentinel.path mock_exec.return_value = (mock_out, mock_err) output = image_utils.qemu_img_info(test_path) mock_exec.assert_called_once_with('env', 'LC_ALL=C', 'qemu-img', 'info', test_path, run_as_root=True, prlimit=image_utils.QEMU_IMG_LIMITS) self.assertEqual(mock_info.return_value, output) @mock.patch('os.name', new='posix') @mock.patch('oslo_utils.imageutils.QemuImgInfo') @mock.patch('cinder.utils.execute') def test_qemu_img_info_not_root(self, mock_exec, mock_info): mock_out = mock.sentinel.out mock_err = mock.sentinel.err test_path = mock.sentinel.path mock_exec.return_value = (mock_out, mock_err) output = image_utils.qemu_img_info(test_path, force_share=False, run_as_root=False) mock_exec.assert_called_once_with('env', 'LC_ALL=C', 'qemu-img', 'info', test_path, 
run_as_root=False, prlimit=image_utils.QEMU_IMG_LIMITS) self.assertEqual(mock_info.return_value, output) @mock.patch('cinder.image.image_utils.os') @mock.patch('oslo_utils.imageutils.QemuImgInfo') @mock.patch('cinder.utils.execute') def test_qemu_img_info_on_nt(self, mock_exec, mock_info, mock_os): mock_out = mock.sentinel.out mock_err = mock.sentinel.err test_path = mock.sentinel.path mock_exec.return_value = (mock_out, mock_err) mock_os.name = 'nt' output = image_utils.qemu_img_info(test_path) mock_exec.assert_called_once_with('qemu-img', 'info', test_path, run_as_root=True, prlimit=image_utils.QEMU_IMG_LIMITS) self.assertEqual(mock_info.return_value, output) @mock.patch('cinder.utils.execute') def test_get_qemu_img_version(self, mock_exec): mock_out = "qemu-img version 2.0.0" mock_err = mock.sentinel.err mock_exec.return_value = (mock_out, mock_err) expected_version = [2, 0, 0] version = image_utils.get_qemu_img_version() mock_exec.assert_called_once_with('qemu-img', '--version', check_exit_code=False) self.assertEqual(expected_version, version) @mock.patch.object(image_utils, 'get_qemu_img_version') def test_validate_qemu_img_version(self, mock_get_qemu_img_version): fake_current_version = [1, 8] mock_get_qemu_img_version.return_value = fake_current_version minimum_version = '1.8' image_utils.check_qemu_img_version(minimum_version) mock_get_qemu_img_version.assert_called_once_with() @mock.patch.object(image_utils, 'get_qemu_img_version') def _test_validate_unsupported_qemu_img_version(self, mock_get_qemu_img_version, current_version=None): mock_get_qemu_img_version.return_value = current_version minimum_version = '2.0' self.assertRaises(exception.VolumeBackendAPIException, image_utils.check_qemu_img_version, minimum_version) mock_get_qemu_img_version.assert_called_once_with() def test_validate_qemu_img_version_not_installed(self): self._test_validate_unsupported_qemu_img_version() def test_validate_older_qemu_img_version(self): 
self._test_validate_unsupported_qemu_img_version( current_version=[1, 8]) @ddt.ddt class TestConvertImage(test.TestCase): @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=True) def test_defaults_block_dev_with_size_info(self, mock_isblk, mock_exec, mock_info): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format mock_info.return_value.virtual_size = 1048576 throttle = throttling.Throttle(prefix=['cgcmd']) with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True): output = image_utils.convert_image(source, dest, out_format, throttle=throttle) self.assertIsNone(output) mock_exec.assert_called_once_with('cgcmd', 'qemu-img', 'convert', '-O', out_format, '-t', 'none', source, dest, run_as_root=True) mock_exec.reset_mock() with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=False): output = image_utils.convert_image(source, dest, out_format) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, source, dest, run_as_root=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=True) def test_defaults_block_dev_without_size_info(self, mock_isblk, mock_exec, mock_info): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format mock_info.side_effect = ValueError throttle = throttling.Throttle(prefix=['cgcmd']) with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True): output = image_utils.convert_image(source, dest, out_format, throttle=throttle) mock_info.assert_called_once_with(source, run_as_root=True) self.assertIsNone(output) mock_exec.assert_called_once_with('cgcmd', 'qemu-img', 'convert', '-O', out_format, '-t', 'none', source, dest, run_as_root=True) 
mock_exec.reset_mock() with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=False): output = image_utils.convert_image(source, dest, out_format) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, source, dest, run_as_root=True) @mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) def test_defaults_not_block_dev_with_size_info(self, mock_isblk, mock_exec, mock_info, mock_odirect): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format out_subformat = 'fake_subformat' mock_info.return_value.virtual_size = 1048576 output = image_utils.convert_image(source, dest, out_format, out_subformat=out_subformat) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, '-o', 'subformat=%s' % out_subformat, source, dest, run_as_root=True) @mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) def test_defaults_not_block_dev_without_size_info(self, mock_isblk, mock_exec, mock_info, mock_odirect): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format out_subformat = 'fake_subformat' mock_info.side_effect = ValueError output = image_utils.convert_image(source, dest, out_format, out_subformat=out_subformat) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, '-o', 'subformat=%s' % out_subformat, source, dest, run_as_root=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', 
return_value=True) def test_defaults_block_dev_ami_img(self, mock_isblk, mock_exec, mock_info): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format mock_info.return_value.virtual_size = 1048576 with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True): output = image_utils.convert_image(source, dest, out_format, src_format='AMI') self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, '-t', 'none', source, dest, run_as_root=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) @mock.patch('cinder.volume.volume_utils.check_for_odirect_support') def test_convert_to_vhd(self, mock_check_odirect, mock_isblk, mock_exec, mock_info): source = mock.sentinel.source dest = mock.sentinel.dest out_format = "vhd" mock_info.return_value.virtual_size = 1048576 output = image_utils.convert_image(source, dest, out_format) self.assertIsNone(output) # Qemu uses the legacy "vpc" format name, instead of "vhd". 
mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', 'vpc', source, dest, run_as_root=True) @ddt.data(True, False) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) def test_convert_to_qcow2(self, compress_option, mock_isblk, mock_exec, mock_info): self.override_config('image_compress_on_upload', compress_option) source = mock.sentinel.source dest = mock.sentinel.dest out_format = 'qcow2' mock_info.return_value.virtual_size = 1048576 image_utils.convert_image(source, dest, out_format, compress=True) exec_args = ['qemu-img', 'convert', '-O', 'qcow2'] if compress_option: exec_args.append('-c') exec_args.extend((source, dest)) mock_exec.assert_called_once_with(*exec_args, run_as_root=True) @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) @mock.patch('os.path.dirname', return_value='fakedir') @mock.patch('os.path.ismount', return_value=True) @mock.patch('oslo_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.utils.tempdir') @mock.patch.object(image_utils.LOG, 'error') def test_not_enough_conversion_space(self, mock_log, mock_tempdir, mock_make, mock_ismount, mock_dirname, mock_isblk, mock_exec, mock_info, mock_odirect, mock_conf): source = mock.sentinel.source mock_conf.image_conversion_dir = 'fakedir' dest = [mock_conf.image_conversion_dir] out_format = mock.sentinel.out_format mock_info.side_effect = ValueError mock_exec.side_effect = processutils.ProcessExecutionError( stderr='No space left on device') self.assertRaises(processutils.ProcessExecutionError, image_utils.convert_image, source, dest, out_format) mock_log.assert_called_with('Insufficient free space on fakedir for' ' image conversion.') class 
TestResizeImage(test.TestCase): @mock.patch('cinder.utils.execute') def test_defaults(self, mock_exec): source = mock.sentinel.source size = mock.sentinel.size output = image_utils.resize_image(source, size) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'resize', source, 'sentinel.sizeG', run_as_root=False) @mock.patch('cinder.utils.execute') def test_run_as_root(self, mock_exec): source = mock.sentinel.source size = mock.sentinel.size output = image_utils.resize_image(source, size, run_as_root=True) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'resize', source, 'sentinel.sizeG', run_as_root=True) class TestFetch(test.TestCase): @mock.patch('eventlet.tpool.Proxy') @mock.patch('os.stat') @mock.patch('cinder.image.image_utils.fileutils') def test_defaults(self, mock_fileutils, mock_stat, mock_proxy): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id path = 'test_path' _user_id = mock.sentinel._user_id _project_id = mock.sentinel._project_id mock_open = mock.mock_open() mock_stat.return_value.st_size = 1048576 with mock.patch('cinder.image.image_utils.open', new=mock_open, create=True): output = image_utils.fetch(ctxt, image_service, image_id, path, _user_id, _project_id) self.assertIsNone(output) mock_proxy.assert_called_once_with(mock_open.return_value) image_service.download.assert_called_once_with(ctxt, image_id, mock_proxy.return_value) mock_open.assert_called_once_with(path, 'wb') mock_fileutils.remove_path_on_error.assert_called_once_with(path) (mock_fileutils.remove_path_on_error.return_value.__enter__ .assert_called_once_with()) (mock_fileutils.remove_path_on_error.return_value.__exit__ .assert_called_once_with(None, None, None)) def test_fetch_enospc(self): context = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id e = exception.ImageTooBig(image_id=image_id, reason = "fake") e.errno = errno.ENOSPC 
        image_service.download.side_effect = e
        path = '/test_path'
        _user_id = mock.sentinel._user_id
        _project_id = mock.sentinel._project_id
        with mock.patch('cinder.image.image_utils.open',
                        new=mock.mock_open(), create=True):
            # The out-of-space error must reach the caller unchanged.
            self.assertRaises(exception.ImageTooBig,
                              image_utils.fetch,
                              context, image_service, image_id, path,
                              _user_id, _project_id)

    def test_fetch_ioerror(self):
        """A connection-reset IOError from the image service download is
        re-raised as ImageDownloadFailed carrying the original strerror."""
        context = mock.sentinel.context
        image_service = mock.Mock()
        image_id = mock.sentinel.image_id
        e = IOError()
        e.errno = errno.ECONNRESET
        e.strerror = 'Some descriptive message'
        image_service.download.side_effect = e
        path = '/test_path'
        _user_id = mock.sentinel._user_id
        _project_id = mock.sentinel._project_id
        with mock.patch('cinder.image.image_utils.open',
                        new=mock.mock_open(), create=True):
            # The wrapped exception's message must include the low-level
            # strerror so the operator can see the root cause.
            self.assertRaisesRegex(exception.ImageDownloadFailed,
                                   e.strerror,
                                   image_utils.fetch,
                                   context, image_service, image_id, path,
                                   _user_id, _project_id)


class MockVerifier(object):
    """Stand-in signature verifier whose verify() always succeeds."""

    def update(self, data):
        return

    def verify(self):
        return True


class BadVerifier(object):
    """Stand-in signature verifier whose verify() always fails with
    cryptography's InvalidSignature."""

    def update(self, data):
        return

    def verify(self):
        raise cryptography.exceptions.InvalidSignature(
            'Invalid signature.'
        )


class TestVerifyImageSignature(test.TestCase):
    """Tests for image_utils.verify_glance_image_signature."""

    @mock.patch('cursive.signature_utils.get_verifier')
    @mock.patch('oslo_utils.fileutils.remove_path_on_error')
    def test_image_signature_verify_failed(self, mock_remove, mock_get):
        """A verifier that raises InvalidSignature must surface as
        ImageSignatureVerificationException."""
        self.mock_object(builtins, 'open', mock.mock_open())
        ctxt = mock.sentinel.context
        # Image metadata carrying a complete set of signature properties.
        metadata = {'name': 'test image',
                    'is_public': False,
                    'protected': False,
                    'properties':
                        {'img_signature_certificate_uuid': 'fake_uuid',
                         'img_signature_hash_method': 'SHA-256',
                         'img_signature': 'signature',
                         'img_signature_key_type': 'RSA-PSS'}}

        class FakeImageService(object):
            def show(self, context, image_id):
                return metadata

        # Turn on the signature-verification config option for this test.
        self.flags(verify_glance_signatures='enabled')
        mock_get.return_value = BadVerifier()

        self.assertRaises(exception.ImageSignatureVerificationException,
                          image_utils.verify_glance_image_signature,
                          ctxt, FakeImageService(), 'fake_id',
                          'fake_path')
        # The verifier must have been built from the image's signature
        # properties, passed through as keyword arguments.
        mock_get.assert_called_once_with(
            context=ctxt,
            img_signature_certificate_uuid='fake_uuid',
            img_signature_hash_method='SHA-256',
            img_signature='signature',
            img_signature_key_type='RSA-PSS')

    @mock.patch('cursive.signature_utils.get_verifier')
    def test_image_signature_metadata_missing(self, mock_get):
        """With no signature properties at all, verification is skipped:
        the call returns False and no verifier is constructed."""
        ctxt = mock.sentinel.context
        metadata = {'name': 'test image',
                    'is_public': False,
                    'protected': False,
                    'properties': {}}

        class FakeImageService(object):
            def show(self, context, image_id):
                return metadata

        self.flags(verify_glance_signatures='enabled')
        result = image_utils.verify_glance_image_signature(
            ctxt, FakeImageService(), 'fake_id', 'fake_path')
        self.assertFalse(result)
        mock_get.assert_not_called()

    @mock.patch('cursive.signature_utils.get_verifier')
    def test_image_signature_metadata_incomplete(self, mock_get):
        """A partially filled signature property set (certificate UUID is
        None) is rejected with InvalidSignatureImage before any verifier
        is built."""
        ctxt = mock.sentinel.context
        metadata = {'name': 'test image',
                    'is_public': False,
                    'protected': False,
                    'properties':
                        {'img_signature_certificate_uuid': None,
                         'img_signature_hash_method': 'SHA-256',
                         'img_signature': 'signature',
                         'img_signature_key_type': 'RSA-PSS'}}

        class FakeImageService(object):
def show(self, context, image_id): return metadata self.flags(verify_glance_signatures='enabled') self.assertRaises(exception.InvalidSignatureImage, image_utils.verify_glance_image_signature, ctxt, FakeImageService(), 'fake_id', 'fake_path') mock_get.assert_not_called() @mock.patch('six.moves.builtins.open') @mock.patch('eventlet.tpool.execute') @mock.patch('cursive.signature_utils.get_verifier') @mock.patch('oslo_utils.fileutils.remove_path_on_error') def test_image_signature_verify_success(self, mock_remove, mock_get, mock_exec, mock_open): ctxt = mock.sentinel.context metadata = {'name': 'test image', 'is_public': False, 'protected': False, 'properties': {'img_signature_certificate_uuid': 'fake_uuid', 'img_signature_hash_method': 'SHA-256', 'img_signature': 'signature', 'img_signature_key_type': 'RSA-PSS'}} class FakeImageService(object): def show(self, context, image_id): return metadata self.flags(verify_glance_signatures='enabled') mock_get.return_value = MockVerifier() result = image_utils.verify_glance_image_signature( ctxt, FakeImageService(), 'fake_id', 'fake_path') self.assertTrue(result) mock_exec.assert_called_once_with( image_utils._verify_image, mock_open.return_value.__enter__.return_value, mock_get.return_value) mock_get.assert_called_once_with( context=ctxt, img_signature_certificate_uuid='fake_uuid', img_signature_hash_method='SHA-256', img_signature='signature', img_signature_key_type='RSA-PSS') class TestVerifyImage(test.TestCase): @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_defaults(self, mock_fetch, mock_fileutils, mock_info): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest mock_data = mock_info.return_value mock_data.file_format = 'test_format' mock_data.backing_file = None output = image_utils.fetch_verify_image(ctxt, image_service, image_id, dest) 
self.assertIsNone(output) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, dest, None, None) mock_info.assert_called_once_with(dest, run_as_root=True, force_share=False) mock_fileutils.remove_path_on_error.assert_called_once_with(dest) (mock_fileutils.remove_path_on_error.return_value.__enter__ .assert_called_once_with()) (mock_fileutils.remove_path_on_error.return_value.__exit__ .assert_called_once_with(None, None, None)) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_kwargs(self, mock_fetch, mock_fileutils, mock_info, mock_check_space, mock_check_size): ctxt = mock.sentinel.context image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 2 run_as_root = mock.sentinel.run_as_root mock_data = mock_info.return_value mock_data.file_format = 'test_format' mock_data.backing_file = None mock_data.virtual_size = 1 output = image_utils.fetch_verify_image( ctxt, image_service, image_id, dest, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, dest, None, None) mock_fileutils.remove_path_on_error.assert_called_once_with(dest) (mock_fileutils.remove_path_on_error.return_value.__enter__ .assert_called_once_with()) (mock_fileutils.remove_path_on_error.return_value.__exit__ .assert_called_once_with(None, None, None)) mock_check_size.assert_called_once_with(mock_data.virtual_size, size, image_id) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_format_error(self, mock_fetch, mock_fileutils, 
mock_info): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest mock_data = mock_info.return_value mock_data.file_format = None mock_data.backing_file = None self.assertRaises(exception.ImageUnacceptable, image_utils.fetch_verify_image, ctxt, image_service, image_id, dest) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_backing_file_error(self, mock_fetch, mock_fileutils, mock_info): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest mock_data = mock_info.return_value mock_data.file_format = 'test_format' mock_data.backing_file = 'test_backing_file' self.assertRaises(exception.ImageUnacceptable, image_utils.fetch_verify_image, ctxt, image_service, image_id, dest) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_size_error(self, mock_fetch, mock_fileutils, mock_info, mock_check_size): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest size = 1 mock_data = mock_info.return_value mock_data.file_format = 'test_format' mock_data.backing_file = None mock_data.virtual_size = 2 * units.Gi mock_check_size.side_effect = exception.ImageUnacceptable( image_id='fake_image_id', reason='test') self.assertRaises(exception.ImageUnacceptable, image_utils.fetch_verify_image, ctxt, image_service, image_id, dest, size=size) class TestTemporaryDir(test.TestCase): @mock.patch('cinder.image.image_utils.CONF') @mock.patch('oslo_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.utils.tempdir') def test_conv_dir_exists(self, mock_tempdir, mock_make, mock_conf): mock_conf.image_conversion_dir = 
mock.sentinel.conv_dir output = image_utils.temporary_dir() self.assertTrue(mock_make.called) mock_tempdir.assert_called_once_with(dir=mock.sentinel.conv_dir) self.assertEqual(output, mock_tempdir.return_value) @mock.patch('cinder.image.image_utils.CONF') @mock.patch('oslo_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.utils.tempdir') def test_create_conv_dir(self, mock_tempdir, mock_make, mock_conf): mock_conf.image_conversion_dir = mock.sentinel.conv_dir output = image_utils.temporary_dir() mock_make.assert_called_once_with(mock.sentinel.conv_dir) mock_tempdir.assert_called_once_with(dir=mock.sentinel.conv_dir) self.assertEqual(output, mock_tempdir.return_value) @mock.patch('cinder.image.image_utils.CONF') @mock.patch('oslo_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.utils.tempdir') def test_no_conv_dir(self, mock_tempdir, mock_make, mock_conf): mock_conf.image_conversion_dir = None output = image_utils.temporary_dir() self.assertTrue(mock_make.called) mock_tempdir.assert_called_once_with(dir=None) self.assertEqual(output, mock_tempdir.return_value) @ddt.ddt class TestUploadVolume(test.TestCase): @ddt.data((mock.sentinel.disk_format, mock.sentinel.disk_format, True), (mock.sentinel.disk_format, mock.sentinel.disk_format, False), ('ploop', 'parallels', True), ('ploop', 'parallels', False)) @mock.patch('eventlet.tpool.Proxy') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_diff_format(self, image_format, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_proxy): input_format, output_format, do_compress = image_format ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': input_format, 'container_format': 
mock.sentinel.container_format} volume_path = mock.sentinel.volume_path mock_os.name = 'posix' data = mock_info.return_value data.file_format = output_format data.backing_file = None temp_file = mock_temp.return_value.__enter__.return_value output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path, compress=do_compress) self.assertIsNone(output) mock_convert.assert_called_once_with(volume_path, temp_file, output_format, run_as_root=True, compress=do_compress) mock_info.assert_called_with(temp_file, run_as_root=True) self.assertEqual(2, mock_info.call_count) mock_open.assert_called_once_with(temp_file, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( ctxt, image_meta['id'], {}, mock_proxy.return_value) @mock.patch('eventlet.tpool.Proxy') @mock.patch('cinder.image.image_utils.utils.temporary_chown') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_same_format(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_chown, mock_proxy): ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': 'raw', 'container_format': mock.sentinel.container_format} volume_path = mock.sentinel.volume_path mock_os.name = 'posix' mock_os.access.return_value = False output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path) self.assertIsNone(output) self.assertFalse(mock_convert.called) self.assertFalse(mock_info.called) mock_chown.assert_called_once_with(volume_path) mock_open.assert_called_once_with(volume_path, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( 
ctxt, image_meta['id'], {}, mock_proxy.return_value) @mock.patch('cinder.image.accelerator.ImageAccel._get_engine') @mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready', return_value = True) @mock.patch('eventlet.tpool.Proxy') @mock.patch('cinder.image.image_utils.utils.temporary_chown') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_same_format_compressed(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_chown, mock_proxy, mock_engine_ready, mock_get_engine): class fakeEngine(object): def __init__(self): pass def compress_img(self, src, dest, run_as_root): pass ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': 'raw', 'container_format': 'compressed'} mock_conf.allow_compression_on_image_upload = True volume_path = mock.sentinel.volume_path mock_os.name = 'posix' data = mock_info.return_value data.file_format = 'raw' data.backing_file = None temp_file = mock_temp.return_value.__enter__.return_value mock_engine = mock.Mock(spec=fakeEngine) mock_get_engine.return_value = mock_engine output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path) self.assertIsNone(output) mock_convert.assert_called_once_with(volume_path, temp_file, 'raw', compress=True, run_as_root=True) mock_info.assert_called_with(temp_file, run_as_root=True) self.assertEqual(2, mock_info.call_count) mock_open.assert_called_once_with(temp_file, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( ctxt, image_meta['id'], {}, mock_proxy.return_value) mock_engine.compress_img.assert_called() @mock.patch('eventlet.tpool.Proxy') 
@mock.patch('cinder.image.image_utils.utils.temporary_chown') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_same_format_on_nt(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_chown, mock_proxy): ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': 'raw', 'container_format': 'bare'} volume_path = mock.sentinel.volume_path mock_os.name = 'nt' mock_os.access.return_value = False output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path) self.assertIsNone(output) self.assertFalse(mock_convert.called) self.assertFalse(mock_info.called) mock_open.assert_called_once_with(volume_path, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( ctxt, image_meta['id'], {}, mock_proxy.return_value) @mock.patch('cinder.image.accelerator.ImageAccel._get_engine') @mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready', return_value = True) @mock.patch('eventlet.tpool.Proxy') @mock.patch('cinder.image.image_utils.utils.temporary_chown') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_same_format_on_nt_compressed(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_chown, mock_proxy, mock_engine_ready, mock_get_engine): class fakeEngine(object): def __init__(self): pass def compress_img(self, src, dest, run_as_root): pass ctxt = mock.sentinel.context image_service = mock.Mock() 
image_meta = {'id': 'test_id', 'disk_format': 'raw', 'container_format': 'compressed'} mock_conf.allow_compression_on_image_upload = True volume_path = mock.sentinel.volume_path mock_os.name = 'posix' data = mock_info.return_value data.file_format = 'raw' data.backing_file = None temp_file = mock_temp.return_value.__enter__.return_value mock_engine = mock.Mock(spec=fakeEngine) mock_get_engine.return_value = mock_engine output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path) self.assertIsNone(output) mock_convert.assert_called_once_with(volume_path, temp_file, 'raw', compress=True, run_as_root=True) mock_info.assert_called_with(temp_file, run_as_root=True) self.assertEqual(2, mock_info.call_count) mock_open.assert_called_once_with(temp_file, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( ctxt, image_meta['id'], {}, mock_proxy.return_value) mock_engine.compress_img.assert_called() @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_convert_error(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf): ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': mock.sentinel.disk_format, 'container_format': mock.sentinel.container_format} volume_path = mock.sentinel.volume_path mock_os.name = 'posix' data = mock_info.return_value data.file_format = mock.sentinel.other_disk_format data.backing_file = None temp_file = mock_temp.return_value.__enter__.return_value self.assertRaises(exception.ImageUnacceptable, image_utils.upload_volume, ctxt, image_service, image_meta, volume_path) mock_convert.assert_called_once_with(volume_path, temp_file, 
mock.sentinel.disk_format, run_as_root=True, compress=True) mock_info.assert_called_with(temp_file, run_as_root=True) self.assertEqual(2, mock_info.call_count) self.assertFalse(image_service.update.called) class TestFetchToVhd(test.TestCase): @mock.patch('cinder.image.image_utils.fetch_to_volume_format') def test_defaults(self, mock_fetch_to): ctxt = mock.sentinel.context image_service = mock.sentinel.image_service image_id = mock.sentinel.image_id dest = mock.sentinel.dest blocksize = mock.sentinel.blocksize out_subformat = 'fake_subformat' output = image_utils.fetch_to_vhd(ctxt, image_service, image_id, dest, blocksize, volume_subformat=out_subformat) self.assertIsNone(output) mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id, dest, 'vpc', blocksize, volume_subformat=out_subformat, user_id=None, project_id=None, run_as_root=True) @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.fetch_to_volume_format') def test_kwargs(self, mock_fetch_to, mock_check_space): ctxt = mock.sentinel.context image_service = mock.sentinel.image_service image_id = mock.sentinel.image_id dest = mock.sentinel.dest blocksize = mock.sentinel.blocksize user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id run_as_root = mock.sentinel.run_as_root out_subformat = 'fake_subformat' output = image_utils.fetch_to_vhd(ctxt, image_service, image_id, dest, blocksize, user_id=user_id, project_id=project_id, run_as_root=run_as_root, volume_subformat=out_subformat) self.assertIsNone(output) mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id, dest, 'vpc', blocksize, volume_subformat=out_subformat, user_id=user_id, project_id=project_id, run_as_root=run_as_root) class TestFetchToRaw(test.TestCase): @mock.patch('cinder.image.image_utils.fetch_to_volume_format') def test_defaults(self, mock_fetch_to): ctxt = mock.sentinel.context image_service = mock.sentinel.image_service image_id = mock.sentinel.image_id 
        dest = mock.sentinel.dest
        blocksize = mock.sentinel.blocksize

        output = image_utils.fetch_to_raw(ctxt, image_service, image_id,
                                          dest, blocksize)
        self.assertIsNone(output)
        # fetch_to_raw is a thin shim that forwards to
        # fetch_to_volume_format with the 'raw' target format.
        mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id,
                                              dest, 'raw', blocksize,
                                              user_id=None, project_id=None,
                                              size=None, run_as_root=True)

    @mock.patch('cinder.image.image_utils.check_available_space')
    @mock.patch('cinder.image.image_utils.fetch_to_volume_format')
    def test_kwargs(self, mock_fetch_to, mock_check_space):
        """All keyword arguments are passed through unchanged."""
        ctxt = mock.sentinel.context
        image_service = mock.sentinel.image_service
        image_id = mock.sentinel.image_id
        dest = mock.sentinel.dest
        blocksize = mock.sentinel.blocksize
        user_id = mock.sentinel.user_id
        project_id = mock.sentinel.project_id
        size = mock.sentinel.size
        run_as_root = mock.sentinel.run_as_root

        output = image_utils.fetch_to_raw(ctxt, image_service, image_id,
                                          dest, blocksize, user_id=user_id,
                                          project_id=project_id, size=size,
                                          run_as_root=run_as_root)
        self.assertIsNone(output)
        mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id,
                                              dest, 'raw', blocksize,
                                              user_id=user_id, size=size,
                                              project_id=project_id,
                                              run_as_root=run_as_root)


class FakeImageService(object):
    """Minimal image-service stub: show() reports a 2 GiB active image of a
    configurable disk format (default 'raw')."""

    def __init__(self, db_driver=None, image_service=None,
                 disk_format='raw'):
        self.temp_images = None
        self.disk_format = disk_format

    def show(self, context, image_id):
        return {'size': 2 * units.Gi,
                'disk_format': self.disk_format,
                'container_format': 'bare',
                'status': 'active'}


class TestFetchToVolumeFormat(test.TestCase):
    """Tests for image_utils.fetch_to_volume_format."""

    @mock.patch('cinder.image.image_utils.check_available_space')
    @mock.patch('cinder.image.image_utils.convert_image')
    @mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
    @mock.patch(
        'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
    @mock.patch('cinder.image.image_utils.is_xenserver_format',
                return_value=False)
    @mock.patch('cinder.image.image_utils.fetch')
    @mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_defaults(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_space): ctxt = mock.sentinel.context ctxt.user_id = mock.sentinel.user_id image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value output = image_utils.fetch_to_volume_format(ctxt, image_service, image_id, dest, volume_format, blocksize) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=True), mock.call(tmp, run_as_root=True)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, None, None) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=True, src_format='raw') @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_kwargs(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_space, mock_check_size): 
ctxt = mock.sentinel.context image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=run_as_root, src_format='raw') mock_check_size.assert_called_once_with(data.virtual_size, size, image_id) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=True) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_convert_from_vhd(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, 
mock_convert, mock_check_space, mock_check_size): ctxt = mock.sentinel.context image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value image_service = FakeImageService(disk_format='vhd') expect_format = 'vpc' output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) mock_repl_xen.assert_called_once_with(tmp) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=run_as_root, src_format=expect_format) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_convert_from_iso(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_copy, mock_convert, mock_check_space, mock_check_size): ctxt = 
mock.sentinel.context image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value image_service = FakeImageService(disk_format='iso') expect_format = 'raw' output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=run_as_root, src_format=expect_format) @mock.patch('cinder.image.image_utils.check_available_space', new=mock.Mock()) @mock.patch('cinder.image.image_utils.is_xenserver_format', new=mock.Mock(return_value=False)) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_temporary_images(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context ctxt.user_id = mock.sentinel.user_id image_service = 
FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock.sentinel.tmp dummy = mock.sentinel.dummy mock_temp.return_value.__enter__.side_effect = [tmp, dummy] with image_utils.TemporaryImages.fetch(image_service, ctxt, image_id) as tmp_img: self.assertEqual(tmp_img, tmp) output = image_utils.fetch_to_volume_format(ctxt, image_service, image_id, dest, volume_format, blocksize) self.assertIsNone(output) self.assertEqual(2, mock_temp.call_count) mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=True), mock.call(dummy, force_share=False, run_as_root=True), mock.call(tmp, run_as_root=True)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, None, None) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=True, src_format='raw') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_no_qemu_img_and_is_raw(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format 
blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root tmp = mock_temp.return_value.__enter__.return_value image_service.show.return_value = {'disk_format': 'raw', 'size': 41126400} image_size_m = math.ceil(float(41126400) / units.Mi) output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_called_once_with(tmp, force_share=False, run_as_root=run_as_root) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_repl_xen.called) mock_copy.assert_called_once_with(tmp, dest, image_size_m, blocksize) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_no_qemu_img_not_raw(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root tmp = 
mock_temp.return_value.__enter__.return_value image_service.show.return_value = {'disk_format': 'not_raw'} self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_called_once_with(tmp, force_share=False, run_as_root=run_as_root) self.assertFalse(mock_fetch.called) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_no_qemu_img_no_metadata(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root tmp = mock_temp.return_value.__enter__.return_value image_service.show.return_value = None self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) 
image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_called_once_with(tmp, force_share=False, run_as_root=run_as_root) self.assertFalse(mock_fetch.called) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_size_error(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_size): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 1234 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = int(1234.5 * units.Gi) tmp = mock_temp.return_value.__enter__.return_value mock_check_size.side_effect = exception.ImageUnacceptable( image_id='fake_image_id', reason='test') self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ 
mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_qemu_img_parse_error(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = None data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) 
self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_backing_file_error(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = mock.sentinel.backing_file data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.check_virtual_size') 
@mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=True) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_xenserver_to_vhd(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_space, mock_check_size): ctxt = mock.sentinel.context image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) mock_repl_xen.assert_called_once_with(tmp) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=None, run_as_root=run_as_root, src_format='raw') @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info', 
side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_no_qemu_img_fetch_verify_image(self, mock_conf, mock_temp, mock_info, mock_fetch): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root image_service.show.return_value = {'disk_format': 'raw', 'size': 41126400} image_utils.fetch_verify_image( ctxt, image_service, image_id, dest, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_info.assert_called_once_with(dest, force_share=False, run_as_root=run_as_root) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, dest, None, None) @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_get_qemu_data_returns_none(self, mock_conf, mock_temp, mock_info): image_id = mock.sentinel.image_id dest = mock.sentinel.dest run_as_root = mock.sentinel.run_as_root disk_format_raw = True has_meta = True output = image_utils.get_qemu_data(image_id, has_meta, disk_format_raw, dest, run_as_root=run_as_root) self.assertIsNone(output) @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_get_qemu_data_with_image_meta_exception(self, mock_conf, mock_temp, mock_info): image_id = mock.sentinel.image_id dest = mock.sentinel.dest run_as_root = mock.sentinel.run_as_root disk_format_raw = False has_meta = True self.assertRaises( exception.ImageUnacceptable, 
image_utils.get_qemu_data, image_id, has_meta, disk_format_raw, dest, run_as_root=run_as_root) @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_get_qemu_data_without_image_meta_except(self, mock_conf, mock_temp, mock_info): image_id = mock.sentinel.image_id dest = mock.sentinel.dest run_as_root = mock.sentinel.run_as_root disk_format_raw = False has_meta = False self.assertRaises( exception.ImageUnacceptable, image_utils.get_qemu_data, image_id, has_meta, disk_format_raw, dest, run_as_root=run_as_root) @mock.patch('cinder.image.accelerator.is_gzip_compressed', return_value = True) @mock.patch('cinder.image.accelerator.ImageAccel._get_engine') @mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready', return_value = True) @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_defaults_compressed(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_space, mock_engine_ready, mock_get_engine, mock_gzip_compressed): class fakeEngine(object): def __init__(self): pass def decompress_img(self, src, dest, run_as_root): pass class FakeImageService(object): def __init__(self, db_driver=None, image_service=None, disk_format='raw'): self.temp_images = None self.disk_format = disk_format def show(self, context, image_id): return {'size': 2 * units.Gi, 
'disk_format': self.disk_format, 'container_format': 'compressed', 'status': 'active'} ctxt = mock.sentinel.context ctxt.user_id = mock.sentinel.user_id image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value mock_engine = mock.Mock(spec=fakeEngine) mock_get_engine.return_value = mock_engine output = image_utils.fetch_to_volume_format(ctxt, image_service, image_id, dest, volume_format, blocksize) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=True), mock.call(tmp, run_as_root=True)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, None, None) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=True, src_format='raw') mock_engine.decompress_img.assert_called() class TestXenserverUtils(test.TestCase): def test_is_xenserver_format(self): image_meta1 = {'disk_format': 'vhd', 'container_format': 'ovf'} self.assertTrue(image_utils.is_xenserver_format(image_meta1)) image_meta2 = {'disk_format': 'test_disk_format', 'container_format': 'test_cont_format'} self.assertFalse(image_utils.is_xenserver_format(image_meta2)) @mock.patch('cinder.image.image_utils.utils.execute') def test_extract_targz(self, mock_exec): name = mock.sentinel.archive_name target = mock.sentinel.target output = image_utils.extract_targz(name, target) mock_exec.assert_called_once_with('tar', '-xzf', name, '-C', target) self.assertIsNone(output) class TestVhdUtils(test.TestCase): @mock.patch('cinder.image.image_utils.utils.execute') def test_set_vhd_parent(self, 
mock_exec): vhd_path = mock.sentinel.vhd_path parentpath = mock.sentinel.parentpath output = image_utils.set_vhd_parent(vhd_path, parentpath) mock_exec.assert_called_once_with('vhd-util', 'modify', '-n', vhd_path, '-p', parentpath) self.assertIsNone(output) @mock.patch('cinder.image.image_utils.set_vhd_parent') def test_fix_vhd_chain(self, mock_set_parent): vhd_chain = (mock.sentinel.first, mock.sentinel.second, mock.sentinel.third, mock.sentinel.fourth, mock.sentinel.fifth) output = image_utils.fix_vhd_chain(vhd_chain) self.assertIsNone(output) mock_set_parent.assert_has_calls([ mock.call(mock.sentinel.first, mock.sentinel.second), mock.call(mock.sentinel.second, mock.sentinel.third), mock.call(mock.sentinel.third, mock.sentinel.fourth), mock.call(mock.sentinel.fourth, mock.sentinel.fifth)]) @mock.patch('cinder.image.image_utils.utils.execute', return_value=(98765.43210, mock.sentinel.error)) def test_get_vhd_size(self, mock_exec): vhd_path = mock.sentinel.vhd_path output = image_utils.get_vhd_size(vhd_path) mock_exec.assert_called_once_with('vhd-util', 'query', '-n', vhd_path, '-v') self.assertEqual(98765, output) @mock.patch('cinder.image.image_utils.utils.execute') def test_resize_vhd(self, mock_exec): vhd_path = mock.sentinel.vhd_path size = 387549349 journal = mock.sentinel.journal output = image_utils.resize_vhd(vhd_path, size, journal) self.assertIsNone(output) mock_exec.assert_called_once_with('vhd-util', 'resize', '-n', vhd_path, '-s', str(size), '-j', journal) @mock.patch('cinder.image.image_utils.utils.execute') def test_coalesce_vhd(self, mock_exec): vhd_path = mock.sentinel.vhd_path output = image_utils.coalesce_vhd(vhd_path) self.assertIsNone(output) mock_exec.assert_called_once_with('vhd-util', 'coalesce', '-n', vhd_path) @mock.patch('cinder.image.image_utils.temporary_dir') @mock.patch('cinder.image.image_utils.coalesce_vhd') @mock.patch('cinder.image.image_utils.resize_vhd') @mock.patch('cinder.image.image_utils.get_vhd_size') 
@mock.patch('cinder.image.image_utils.utils.execute') def test_coalesce_chain(self, mock_exec, mock_size, mock_resize, mock_coal, mock_temp): vhd_chain = (mock.sentinel.first, mock.sentinel.second, mock.sentinel.third, mock.sentinel.fourth, mock.sentinel.fifth) # os.path.join does not work with MagicMock objects on Windows. mock_temp.return_value.__enter__.return_value = 'fake_temp_dir' output = image_utils.coalesce_chain(vhd_chain) self.assertEqual(mock.sentinel.fifth, output) mock_size.assert_has_calls([ mock.call(mock.sentinel.first), mock.call(mock.sentinel.second), mock.call(mock.sentinel.third), mock.call(mock.sentinel.fourth)]) mock_resize.assert_has_calls([ mock.call(mock.sentinel.second, mock_size.return_value, mock.ANY), mock.call(mock.sentinel.third, mock_size.return_value, mock.ANY), mock.call(mock.sentinel.fourth, mock_size.return_value, mock.ANY), mock.call(mock.sentinel.fifth, mock_size.return_value, mock.ANY)]) mock_coal.assert_has_calls([ mock.call(mock.sentinel.first), mock.call(mock.sentinel.second), mock.call(mock.sentinel.third), mock.call(mock.sentinel.fourth)]) @mock.patch('cinder.image.image_utils.os.path') def test_discover_vhd_chain(self, mock_path): directory = '/some/test/directory' mock_path.join.side_effect = lambda x, y: '/'.join((x, y)) mock_path.exists.side_effect = (True, True, True, False) output = image_utils.discover_vhd_chain(directory) expected_output = ['/some/test/directory/0.vhd', '/some/test/directory/1.vhd', '/some/test/directory/2.vhd'] self.assertEqual(expected_output, output) @mock.patch('cinder.image.image_utils.temporary_dir') @mock.patch('cinder.image.image_utils.os.rename') @mock.patch('cinder.image.image_utils.fileutils.delete_if_exists') @mock.patch('cinder.image.image_utils.coalesce_chain') @mock.patch('cinder.image.image_utils.fix_vhd_chain') @mock.patch('cinder.image.image_utils.discover_vhd_chain') @mock.patch('cinder.image.image_utils.extract_targz') def test_replace_xenserver_image_with_coalesced_vhd( self, 
mock_targz, mock_discover, mock_fix, mock_coal, mock_delete, mock_rename, mock_temp): image_file = mock.sentinel.image_file tmp = mock_temp.return_value.__enter__.return_value output = image_utils.replace_xenserver_image_with_coalesced_vhd( image_file) self.assertIsNone(output) mock_targz.assert_called_once_with(image_file, tmp) mock_discover.assert_called_once_with(tmp) mock_fix.assert_called_once_with(mock_discover.return_value) mock_coal.assert_called_once_with(mock_discover.return_value) mock_delete.assert_called_once_with(image_file) mock_rename.assert_called_once_with(mock_coal.return_value, image_file) class TestCreateTemporaryFile(test.TestCase): @mock.patch('cinder.image.image_utils.os.close') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.makedirs') @mock.patch('cinder.image.image_utils.tempfile.mkstemp') def test_create_temporary_file_no_dir(self, mock_mkstemp, mock_dirs, mock_conf, mock_close): mock_conf.image_conversion_dir = None fd = mock.sentinel.file_descriptor path = mock.sentinel.absolute_pathname mock_mkstemp.return_value = (fd, path) output = image_utils.create_temporary_file() self.assertEqual(path, output) mock_mkstemp.assert_called_once_with(dir=None) mock_close.assert_called_once_with(fd) @mock.patch('cinder.image.image_utils.os.close') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.makedirs') @mock.patch('cinder.image.image_utils.tempfile.mkstemp') def test_create_temporary_file_with_dir(self, mock_mkstemp, mock_dirs, mock_conf, mock_close): conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir fd = mock.sentinel.file_descriptor path = mock.sentinel.absolute_pathname mock_mkstemp.return_value = (fd, path) output = image_utils.create_temporary_file() self.assertEqual(path, output) self.assertTrue(mock_dirs.called) mock_mkstemp.assert_called_once_with(dir=conv_dir) mock_close.assert_called_once_with(fd) 
@mock.patch('cinder.image.image_utils.os.close') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.tempfile.mkstemp') def test_create_temporary_file_and_dir(self, mock_mkstemp, mock_dirs, mock_conf, mock_close): conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir fd = mock.sentinel.file_descriptor path = mock.sentinel.absolute_pathname mock_mkstemp.return_value = (fd, path) output = image_utils.create_temporary_file() self.assertEqual(path, output) mock_dirs.assert_called_once_with(conv_dir) mock_mkstemp.assert_called_once_with(dir=conv_dir) mock_close.assert_called_once_with(fd) @mock.patch('cinder.image.image_utils.os.remove') @mock.patch('cinder.image.image_utils.os.path.join') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.listdir') @mock.patch('cinder.image.image_utils.os.path.exists', return_value=True) def test_cleanup_temporary_file(self, mock_path, mock_listdir, mock_conf, mock_join, mock_remove): mock_listdir.return_value = ['tmphost@backend1', 'tmphost@backend2'] conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir mock_join.return_value = '/test/tmp/tmphost@backend1' image_utils.cleanup_temporary_file('host@backend1') mock_listdir.assert_called_once_with(conv_dir) mock_remove.assert_called_once_with('/test/tmp/tmphost@backend1') @mock.patch('cinder.image.image_utils.os.remove') @mock.patch('cinder.image.image_utils.os.listdir') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.path.exists', return_value=False) def test_cleanup_temporary_file_with_not_exist_path(self, mock_path, mock_conf, mock_listdir, mock_remove): conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir image_utils.cleanup_temporary_file('host@backend1') self.assertFalse(mock_listdir.called) 
self.assertFalse(mock_remove.called) @mock.patch('cinder.image.image_utils.os.remove') @mock.patch('cinder.image.image_utils.os.path.join') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.listdir') @mock.patch('cinder.image.image_utils.os.path.exists', return_value=True) def test_cleanup_temporary_file_with_exception(self, mock_path, mock_listdir, mock_conf, mock_join, mock_remove): mock_listdir.return_value = ['tmphost@backend1', 'tmphost@backend2'] conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir mock_join.return_value = '/test/tmp/tmphost@backend1' mock_remove.side_effect = OSError image_utils.cleanup_temporary_file('host@backend1') mock_listdir.assert_called_once_with(conv_dir) mock_remove.assert_called_once_with('/test/tmp/tmphost@backend1') class TestTemporaryFileContextManager(test.TestCase): @mock.patch('cinder.image.image_utils.create_temporary_file', return_value=mock.sentinel.temporary_file) @mock.patch('cinder.image.image_utils.fileutils.delete_if_exists') def test_temporary_file(self, mock_delete, mock_create): with image_utils.temporary_file() as tmp_file: self.assertEqual(mock.sentinel.temporary_file, tmp_file) self.assertFalse(mock_delete.called) mock_delete.assert_called_once_with(mock.sentinel.temporary_file) class TestImageUtils(test.TestCase): def test_get_virtual_size(self): image_id = fake.IMAGE_ID virtual_size = 1073741824 volume_size = 2 virt_size = image_utils.check_virtual_size(virtual_size, volume_size, image_id) self.assertEqual(1, virt_size) def test_get_bigger_virtual_size(self): image_id = fake.IMAGE_ID virtual_size = 3221225472 volume_size = 2 self.assertRaises(exception.ImageUnacceptable, image_utils.check_virtual_size, virtual_size, volume_size, image_id) def test_decode_cipher(self): expected = {'cipher_alg': 'aes-256', 'cipher_mode': 'xts', 'ivgen_alg': 'essiv'} result = image_utils.decode_cipher('aes-xts-essiv', 256) self.assertEqual(expected, result)
45.847704
79
0.621296
import errno import math import cryptography import ddt import mock from oslo_concurrency import processutils from oslo_utils import units from six.moves import builtins from cinder import exception from cinder.image import image_utils from cinder import test from cinder.tests.unit import fake_constants as fake from cinder.volume import throttling class TestQemuImgInfo(test.TestCase): @mock.patch('os.name', new='posix') @mock.patch('oslo_utils.imageutils.QemuImgInfo') @mock.patch('cinder.utils.execute') def test_qemu_img_info(self, mock_exec, mock_info): mock_out = mock.sentinel.out mock_err = mock.sentinel.err test_path = mock.sentinel.path mock_exec.return_value = (mock_out, mock_err) output = image_utils.qemu_img_info(test_path) mock_exec.assert_called_once_with('env', 'LC_ALL=C', 'qemu-img', 'info', test_path, run_as_root=True, prlimit=image_utils.QEMU_IMG_LIMITS) self.assertEqual(mock_info.return_value, output) @mock.patch('os.name', new='posix') @mock.patch('oslo_utils.imageutils.QemuImgInfo') @mock.patch('cinder.utils.execute') def test_qemu_img_info_not_root(self, mock_exec, mock_info): mock_out = mock.sentinel.out mock_err = mock.sentinel.err test_path = mock.sentinel.path mock_exec.return_value = (mock_out, mock_err) output = image_utils.qemu_img_info(test_path, force_share=False, run_as_root=False) mock_exec.assert_called_once_with('env', 'LC_ALL=C', 'qemu-img', 'info', test_path, run_as_root=False, prlimit=image_utils.QEMU_IMG_LIMITS) self.assertEqual(mock_info.return_value, output) @mock.patch('cinder.image.image_utils.os') @mock.patch('oslo_utils.imageutils.QemuImgInfo') @mock.patch('cinder.utils.execute') def test_qemu_img_info_on_nt(self, mock_exec, mock_info, mock_os): mock_out = mock.sentinel.out mock_err = mock.sentinel.err test_path = mock.sentinel.path mock_exec.return_value = (mock_out, mock_err) mock_os.name = 'nt' output = image_utils.qemu_img_info(test_path) mock_exec.assert_called_once_with('qemu-img', 'info', test_path, run_as_root=True, 
prlimit=image_utils.QEMU_IMG_LIMITS) self.assertEqual(mock_info.return_value, output) @mock.patch('cinder.utils.execute') def test_get_qemu_img_version(self, mock_exec): mock_out = "qemu-img version 2.0.0" mock_err = mock.sentinel.err mock_exec.return_value = (mock_out, mock_err) expected_version = [2, 0, 0] version = image_utils.get_qemu_img_version() mock_exec.assert_called_once_with('qemu-img', '--version', check_exit_code=False) self.assertEqual(expected_version, version) @mock.patch.object(image_utils, 'get_qemu_img_version') def test_validate_qemu_img_version(self, mock_get_qemu_img_version): fake_current_version = [1, 8] mock_get_qemu_img_version.return_value = fake_current_version minimum_version = '1.8' image_utils.check_qemu_img_version(minimum_version) mock_get_qemu_img_version.assert_called_once_with() @mock.patch.object(image_utils, 'get_qemu_img_version') def _test_validate_unsupported_qemu_img_version(self, mock_get_qemu_img_version, current_version=None): mock_get_qemu_img_version.return_value = current_version minimum_version = '2.0' self.assertRaises(exception.VolumeBackendAPIException, image_utils.check_qemu_img_version, minimum_version) mock_get_qemu_img_version.assert_called_once_with() def test_validate_qemu_img_version_not_installed(self): self._test_validate_unsupported_qemu_img_version() def test_validate_older_qemu_img_version(self): self._test_validate_unsupported_qemu_img_version( current_version=[1, 8]) @ddt.ddt class TestConvertImage(test.TestCase): @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=True) def test_defaults_block_dev_with_size_info(self, mock_isblk, mock_exec, mock_info): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format mock_info.return_value.virtual_size = 1048576 throttle = throttling.Throttle(prefix=['cgcmd']) with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', 
return_value=True): output = image_utils.convert_image(source, dest, out_format, throttle=throttle) self.assertIsNone(output) mock_exec.assert_called_once_with('cgcmd', 'qemu-img', 'convert', '-O', out_format, '-t', 'none', source, dest, run_as_root=True) mock_exec.reset_mock() with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=False): output = image_utils.convert_image(source, dest, out_format) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, source, dest, run_as_root=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=True) def test_defaults_block_dev_without_size_info(self, mock_isblk, mock_exec, mock_info): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format mock_info.side_effect = ValueError throttle = throttling.Throttle(prefix=['cgcmd']) with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True): output = image_utils.convert_image(source, dest, out_format, throttle=throttle) mock_info.assert_called_once_with(source, run_as_root=True) self.assertIsNone(output) mock_exec.assert_called_once_with('cgcmd', 'qemu-img', 'convert', '-O', out_format, '-t', 'none', source, dest, run_as_root=True) mock_exec.reset_mock() with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=False): output = image_utils.convert_image(source, dest, out_format) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, source, dest, run_as_root=True) @mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) def test_defaults_not_block_dev_with_size_info(self, mock_isblk, mock_exec, 
mock_info, mock_odirect): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format out_subformat = 'fake_subformat' mock_info.return_value.virtual_size = 1048576 output = image_utils.convert_image(source, dest, out_format, out_subformat=out_subformat) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, '-o', 'subformat=%s' % out_subformat, source, dest, run_as_root=True) @mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) def test_defaults_not_block_dev_without_size_info(self, mock_isblk, mock_exec, mock_info, mock_odirect): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format out_subformat = 'fake_subformat' mock_info.side_effect = ValueError output = image_utils.convert_image(source, dest, out_format, out_subformat=out_subformat) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, '-o', 'subformat=%s' % out_subformat, source, dest, run_as_root=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=True) def test_defaults_block_dev_ami_img(self, mock_isblk, mock_exec, mock_info): source = mock.sentinel.source dest = mock.sentinel.dest out_format = mock.sentinel.out_format mock_info.return_value.virtual_size = 1048576 with mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True): output = image_utils.convert_image(source, dest, out_format, src_format='AMI') self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', out_format, '-t', 'none', source, dest, run_as_root=True) @mock.patch('cinder.image.image_utils.qemu_img_info') 
@mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) @mock.patch('cinder.volume.volume_utils.check_for_odirect_support') def test_convert_to_vhd(self, mock_check_odirect, mock_isblk, mock_exec, mock_info): source = mock.sentinel.source dest = mock.sentinel.dest out_format = "vhd" mock_info.return_value.virtual_size = 1048576 output = image_utils.convert_image(source, dest, out_format) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'convert', '-O', 'vpc', source, dest, run_as_root=True) @ddt.data(True, False) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) def test_convert_to_qcow2(self, compress_option, mock_isblk, mock_exec, mock_info): self.override_config('image_compress_on_upload', compress_option) source = mock.sentinel.source dest = mock.sentinel.dest out_format = 'qcow2' mock_info.return_value.virtual_size = 1048576 image_utils.convert_image(source, dest, out_format, compress=True) exec_args = ['qemu-img', 'convert', '-O', 'qcow2'] if compress_option: exec_args.append('-c') exec_args.extend((source, dest)) mock_exec.assert_called_once_with(*exec_args, run_as_root=True) @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.volume.volume_utils.check_for_odirect_support', return_value=True) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.utils.execute') @mock.patch('cinder.utils.is_blk_device', return_value=False) @mock.patch('os.path.dirname', return_value='fakedir') @mock.patch('os.path.ismount', return_value=True) @mock.patch('oslo_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.utils.tempdir') @mock.patch.object(image_utils.LOG, 'error') def test_not_enough_conversion_space(self, mock_log, mock_tempdir, mock_make, mock_ismount, mock_dirname, mock_isblk, mock_exec, mock_info, mock_odirect, mock_conf): source = mock.sentinel.source 
mock_conf.image_conversion_dir = 'fakedir' dest = [mock_conf.image_conversion_dir] out_format = mock.sentinel.out_format mock_info.side_effect = ValueError mock_exec.side_effect = processutils.ProcessExecutionError( stderr='No space left on device') self.assertRaises(processutils.ProcessExecutionError, image_utils.convert_image, source, dest, out_format) mock_log.assert_called_with('Insufficient free space on fakedir for' ' image conversion.') class TestResizeImage(test.TestCase): @mock.patch('cinder.utils.execute') def test_defaults(self, mock_exec): source = mock.sentinel.source size = mock.sentinel.size output = image_utils.resize_image(source, size) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'resize', source, 'sentinel.sizeG', run_as_root=False) @mock.patch('cinder.utils.execute') def test_run_as_root(self, mock_exec): source = mock.sentinel.source size = mock.sentinel.size output = image_utils.resize_image(source, size, run_as_root=True) self.assertIsNone(output) mock_exec.assert_called_once_with('qemu-img', 'resize', source, 'sentinel.sizeG', run_as_root=True) class TestFetch(test.TestCase): @mock.patch('eventlet.tpool.Proxy') @mock.patch('os.stat') @mock.patch('cinder.image.image_utils.fileutils') def test_defaults(self, mock_fileutils, mock_stat, mock_proxy): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id path = 'test_path' _user_id = mock.sentinel._user_id _project_id = mock.sentinel._project_id mock_open = mock.mock_open() mock_stat.return_value.st_size = 1048576 with mock.patch('cinder.image.image_utils.open', new=mock_open, create=True): output = image_utils.fetch(ctxt, image_service, image_id, path, _user_id, _project_id) self.assertIsNone(output) mock_proxy.assert_called_once_with(mock_open.return_value) image_service.download.assert_called_once_with(ctxt, image_id, mock_proxy.return_value) mock_open.assert_called_once_with(path, 'wb') 
mock_fileutils.remove_path_on_error.assert_called_once_with(path) (mock_fileutils.remove_path_on_error.return_value.__enter__ .assert_called_once_with()) (mock_fileutils.remove_path_on_error.return_value.__exit__ .assert_called_once_with(None, None, None)) def test_fetch_enospc(self): context = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id e = exception.ImageTooBig(image_id=image_id, reason = "fake") e.errno = errno.ENOSPC image_service.download.side_effect = e path = '/test_path' _user_id = mock.sentinel._user_id _project_id = mock.sentinel._project_id with mock.patch('cinder.image.image_utils.open', new=mock.mock_open(), create=True): self.assertRaises(exception.ImageTooBig, image_utils.fetch, context, image_service, image_id, path, _user_id, _project_id) def test_fetch_ioerror(self): context = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id e = IOError() e.errno = errno.ECONNRESET e.strerror = 'Some descriptive message' image_service.download.side_effect = e path = '/test_path' _user_id = mock.sentinel._user_id _project_id = mock.sentinel._project_id with mock.patch('cinder.image.image_utils.open', new=mock.mock_open(), create=True): self.assertRaisesRegex(exception.ImageDownloadFailed, e.strerror, image_utils.fetch, context, image_service, image_id, path, _user_id, _project_id) class MockVerifier(object): def update(self, data): return def verify(self): return True class BadVerifier(object): def update(self, data): return def verify(self): raise cryptography.exceptions.InvalidSignature( 'Invalid signature.' 
) class TestVerifyImageSignature(test.TestCase): @mock.patch('cursive.signature_utils.get_verifier') @mock.patch('oslo_utils.fileutils.remove_path_on_error') def test_image_signature_verify_failed(self, mock_remove, mock_get): self.mock_object(builtins, 'open', mock.mock_open()) ctxt = mock.sentinel.context metadata = {'name': 'test image', 'is_public': False, 'protected': False, 'properties': {'img_signature_certificate_uuid': 'fake_uuid', 'img_signature_hash_method': 'SHA-256', 'img_signature': 'signature', 'img_signature_key_type': 'RSA-PSS'}} class FakeImageService(object): def show(self, context, image_id): return metadata self.flags(verify_glance_signatures='enabled') mock_get.return_value = BadVerifier() self.assertRaises(exception.ImageSignatureVerificationException, image_utils.verify_glance_image_signature, ctxt, FakeImageService(), 'fake_id', 'fake_path') mock_get.assert_called_once_with( context=ctxt, img_signature_certificate_uuid='fake_uuid', img_signature_hash_method='SHA-256', img_signature='signature', img_signature_key_type='RSA-PSS') @mock.patch('cursive.signature_utils.get_verifier') def test_image_signature_metadata_missing(self, mock_get): ctxt = mock.sentinel.context metadata = {'name': 'test image', 'is_public': False, 'protected': False, 'properties': {}} class FakeImageService(object): def show(self, context, image_id): return metadata self.flags(verify_glance_signatures='enabled') result = image_utils.verify_glance_image_signature( ctxt, FakeImageService(), 'fake_id', 'fake_path') self.assertFalse(result) mock_get.assert_not_called() @mock.patch('cursive.signature_utils.get_verifier') def test_image_signature_metadata_incomplete(self, mock_get): ctxt = mock.sentinel.context metadata = {'name': 'test image', 'is_public': False, 'protected': False, 'properties': {'img_signature_certificate_uuid': None, 'img_signature_hash_method': 'SHA-256', 'img_signature': 'signature', 'img_signature_key_type': 'RSA-PSS'}} class FakeImageService(object): 
def show(self, context, image_id): return metadata self.flags(verify_glance_signatures='enabled') self.assertRaises(exception.InvalidSignatureImage, image_utils.verify_glance_image_signature, ctxt, FakeImageService(), 'fake_id', 'fake_path') mock_get.assert_not_called() @mock.patch('six.moves.builtins.open') @mock.patch('eventlet.tpool.execute') @mock.patch('cursive.signature_utils.get_verifier') @mock.patch('oslo_utils.fileutils.remove_path_on_error') def test_image_signature_verify_success(self, mock_remove, mock_get, mock_exec, mock_open): ctxt = mock.sentinel.context metadata = {'name': 'test image', 'is_public': False, 'protected': False, 'properties': {'img_signature_certificate_uuid': 'fake_uuid', 'img_signature_hash_method': 'SHA-256', 'img_signature': 'signature', 'img_signature_key_type': 'RSA-PSS'}} class FakeImageService(object): def show(self, context, image_id): return metadata self.flags(verify_glance_signatures='enabled') mock_get.return_value = MockVerifier() result = image_utils.verify_glance_image_signature( ctxt, FakeImageService(), 'fake_id', 'fake_path') self.assertTrue(result) mock_exec.assert_called_once_with( image_utils._verify_image, mock_open.return_value.__enter__.return_value, mock_get.return_value) mock_get.assert_called_once_with( context=ctxt, img_signature_certificate_uuid='fake_uuid', img_signature_hash_method='SHA-256', img_signature='signature', img_signature_key_type='RSA-PSS') class TestVerifyImage(test.TestCase): @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_defaults(self, mock_fetch, mock_fileutils, mock_info): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest mock_data = mock_info.return_value mock_data.file_format = 'test_format' mock_data.backing_file = None output = image_utils.fetch_verify_image(ctxt, image_service, image_id, dest) 
self.assertIsNone(output) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, dest, None, None) mock_info.assert_called_once_with(dest, run_as_root=True, force_share=False) mock_fileutils.remove_path_on_error.assert_called_once_with(dest) (mock_fileutils.remove_path_on_error.return_value.__enter__ .assert_called_once_with()) (mock_fileutils.remove_path_on_error.return_value.__exit__ .assert_called_once_with(None, None, None)) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_kwargs(self, mock_fetch, mock_fileutils, mock_info, mock_check_space, mock_check_size): ctxt = mock.sentinel.context image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 2 run_as_root = mock.sentinel.run_as_root mock_data = mock_info.return_value mock_data.file_format = 'test_format' mock_data.backing_file = None mock_data.virtual_size = 1 output = image_utils.fetch_verify_image( ctxt, image_service, image_id, dest, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, dest, None, None) mock_fileutils.remove_path_on_error.assert_called_once_with(dest) (mock_fileutils.remove_path_on_error.return_value.__enter__ .assert_called_once_with()) (mock_fileutils.remove_path_on_error.return_value.__exit__ .assert_called_once_with(None, None, None)) mock_check_size.assert_called_once_with(mock_data.virtual_size, size, image_id) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_format_error(self, mock_fetch, mock_fileutils, 
mock_info): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest mock_data = mock_info.return_value mock_data.file_format = None mock_data.backing_file = None self.assertRaises(exception.ImageUnacceptable, image_utils.fetch_verify_image, ctxt, image_service, image_id, dest) @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_backing_file_error(self, mock_fetch, mock_fileutils, mock_info): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest mock_data = mock_info.return_value mock_data.file_format = 'test_format' mock_data.backing_file = 'test_backing_file' self.assertRaises(exception.ImageUnacceptable, image_utils.fetch_verify_image, ctxt, image_service, image_id, dest) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.fileutils') @mock.patch('cinder.image.image_utils.fetch') def test_size_error(self, mock_fetch, mock_fileutils, mock_info, mock_check_size): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest size = 1 mock_data = mock_info.return_value mock_data.file_format = 'test_format' mock_data.backing_file = None mock_data.virtual_size = 2 * units.Gi mock_check_size.side_effect = exception.ImageUnacceptable( image_id='fake_image_id', reason='test') self.assertRaises(exception.ImageUnacceptable, image_utils.fetch_verify_image, ctxt, image_service, image_id, dest, size=size) class TestTemporaryDir(test.TestCase): @mock.patch('cinder.image.image_utils.CONF') @mock.patch('oslo_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.utils.tempdir') def test_conv_dir_exists(self, mock_tempdir, mock_make, mock_conf): mock_conf.image_conversion_dir = 
mock.sentinel.conv_dir output = image_utils.temporary_dir() self.assertTrue(mock_make.called) mock_tempdir.assert_called_once_with(dir=mock.sentinel.conv_dir) self.assertEqual(output, mock_tempdir.return_value) @mock.patch('cinder.image.image_utils.CONF') @mock.patch('oslo_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.utils.tempdir') def test_create_conv_dir(self, mock_tempdir, mock_make, mock_conf): mock_conf.image_conversion_dir = mock.sentinel.conv_dir output = image_utils.temporary_dir() mock_make.assert_called_once_with(mock.sentinel.conv_dir) mock_tempdir.assert_called_once_with(dir=mock.sentinel.conv_dir) self.assertEqual(output, mock_tempdir.return_value) @mock.patch('cinder.image.image_utils.CONF') @mock.patch('oslo_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.utils.tempdir') def test_no_conv_dir(self, mock_tempdir, mock_make, mock_conf): mock_conf.image_conversion_dir = None output = image_utils.temporary_dir() self.assertTrue(mock_make.called) mock_tempdir.assert_called_once_with(dir=None) self.assertEqual(output, mock_tempdir.return_value) @ddt.ddt class TestUploadVolume(test.TestCase): @ddt.data((mock.sentinel.disk_format, mock.sentinel.disk_format, True), (mock.sentinel.disk_format, mock.sentinel.disk_format, False), ('ploop', 'parallels', True), ('ploop', 'parallels', False)) @mock.patch('eventlet.tpool.Proxy') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_diff_format(self, image_format, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_proxy): input_format, output_format, do_compress = image_format ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': input_format, 'container_format': 
mock.sentinel.container_format} volume_path = mock.sentinel.volume_path mock_os.name = 'posix' data = mock_info.return_value data.file_format = output_format data.backing_file = None temp_file = mock_temp.return_value.__enter__.return_value output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path, compress=do_compress) self.assertIsNone(output) mock_convert.assert_called_once_with(volume_path, temp_file, output_format, run_as_root=True, compress=do_compress) mock_info.assert_called_with(temp_file, run_as_root=True) self.assertEqual(2, mock_info.call_count) mock_open.assert_called_once_with(temp_file, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( ctxt, image_meta['id'], {}, mock_proxy.return_value) @mock.patch('eventlet.tpool.Proxy') @mock.patch('cinder.image.image_utils.utils.temporary_chown') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_same_format(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_chown, mock_proxy): ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': 'raw', 'container_format': mock.sentinel.container_format} volume_path = mock.sentinel.volume_path mock_os.name = 'posix' mock_os.access.return_value = False output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path) self.assertIsNone(output) self.assertFalse(mock_convert.called) self.assertFalse(mock_info.called) mock_chown.assert_called_once_with(volume_path) mock_open.assert_called_once_with(volume_path, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( 
ctxt, image_meta['id'], {}, mock_proxy.return_value) @mock.patch('cinder.image.accelerator.ImageAccel._get_engine') @mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready', return_value = True) @mock.patch('eventlet.tpool.Proxy') @mock.patch('cinder.image.image_utils.utils.temporary_chown') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_same_format_compressed(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_chown, mock_proxy, mock_engine_ready, mock_get_engine): class fakeEngine(object): def __init__(self): pass def compress_img(self, src, dest, run_as_root): pass ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': 'raw', 'container_format': 'compressed'} mock_conf.allow_compression_on_image_upload = True volume_path = mock.sentinel.volume_path mock_os.name = 'posix' data = mock_info.return_value data.file_format = 'raw' data.backing_file = None temp_file = mock_temp.return_value.__enter__.return_value mock_engine = mock.Mock(spec=fakeEngine) mock_get_engine.return_value = mock_engine output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path) self.assertIsNone(output) mock_convert.assert_called_once_with(volume_path, temp_file, 'raw', compress=True, run_as_root=True) mock_info.assert_called_with(temp_file, run_as_root=True) self.assertEqual(2, mock_info.call_count) mock_open.assert_called_once_with(temp_file, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( ctxt, image_meta['id'], {}, mock_proxy.return_value) mock_engine.compress_img.assert_called() @mock.patch('eventlet.tpool.Proxy') 
@mock.patch('cinder.image.image_utils.utils.temporary_chown') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_same_format_on_nt(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_chown, mock_proxy): ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': 'raw', 'container_format': 'bare'} volume_path = mock.sentinel.volume_path mock_os.name = 'nt' mock_os.access.return_value = False output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path) self.assertIsNone(output) self.assertFalse(mock_convert.called) self.assertFalse(mock_info.called) mock_open.assert_called_once_with(volume_path, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( ctxt, image_meta['id'], {}, mock_proxy.return_value) @mock.patch('cinder.image.accelerator.ImageAccel._get_engine') @mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready', return_value = True) @mock.patch('eventlet.tpool.Proxy') @mock.patch('cinder.image.image_utils.utils.temporary_chown') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_same_format_on_nt_compressed(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf, mock_chown, mock_proxy, mock_engine_ready, mock_get_engine): class fakeEngine(object): def __init__(self): pass def compress_img(self, src, dest, run_as_root): pass ctxt = mock.sentinel.context image_service = mock.Mock() 
image_meta = {'id': 'test_id', 'disk_format': 'raw', 'container_format': 'compressed'} mock_conf.allow_compression_on_image_upload = True volume_path = mock.sentinel.volume_path mock_os.name = 'posix' data = mock_info.return_value data.file_format = 'raw' data.backing_file = None temp_file = mock_temp.return_value.__enter__.return_value mock_engine = mock.Mock(spec=fakeEngine) mock_get_engine.return_value = mock_engine output = image_utils.upload_volume(ctxt, image_service, image_meta, volume_path) self.assertIsNone(output) mock_convert.assert_called_once_with(volume_path, temp_file, 'raw', compress=True, run_as_root=True) mock_info.assert_called_with(temp_file, run_as_root=True) self.assertEqual(2, mock_info.call_count) mock_open.assert_called_once_with(temp_file, 'rb') mock_proxy.assert_called_once_with( mock_open.return_value.__enter__.return_value) image_service.update.assert_called_once_with( ctxt, image_meta['id'], {}, mock_proxy.return_value) mock_engine.compress_img.assert_called() @mock.patch('cinder.image.image_utils.CONF') @mock.patch('six.moves.builtins.open') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.os') def test_convert_error(self, mock_os, mock_temp, mock_convert, mock_info, mock_open, mock_conf): ctxt = mock.sentinel.context image_service = mock.Mock() image_meta = {'id': 'test_id', 'disk_format': mock.sentinel.disk_format, 'container_format': mock.sentinel.container_format} volume_path = mock.sentinel.volume_path mock_os.name = 'posix' data = mock_info.return_value data.file_format = mock.sentinel.other_disk_format data.backing_file = None temp_file = mock_temp.return_value.__enter__.return_value self.assertRaises(exception.ImageUnacceptable, image_utils.upload_volume, ctxt, image_service, image_meta, volume_path) mock_convert.assert_called_once_with(volume_path, temp_file, 
mock.sentinel.disk_format, run_as_root=True, compress=True) mock_info.assert_called_with(temp_file, run_as_root=True) self.assertEqual(2, mock_info.call_count) self.assertFalse(image_service.update.called) class TestFetchToVhd(test.TestCase): @mock.patch('cinder.image.image_utils.fetch_to_volume_format') def test_defaults(self, mock_fetch_to): ctxt = mock.sentinel.context image_service = mock.sentinel.image_service image_id = mock.sentinel.image_id dest = mock.sentinel.dest blocksize = mock.sentinel.blocksize out_subformat = 'fake_subformat' output = image_utils.fetch_to_vhd(ctxt, image_service, image_id, dest, blocksize, volume_subformat=out_subformat) self.assertIsNone(output) mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id, dest, 'vpc', blocksize, volume_subformat=out_subformat, user_id=None, project_id=None, run_as_root=True) @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.fetch_to_volume_format') def test_kwargs(self, mock_fetch_to, mock_check_space): ctxt = mock.sentinel.context image_service = mock.sentinel.image_service image_id = mock.sentinel.image_id dest = mock.sentinel.dest blocksize = mock.sentinel.blocksize user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id run_as_root = mock.sentinel.run_as_root out_subformat = 'fake_subformat' output = image_utils.fetch_to_vhd(ctxt, image_service, image_id, dest, blocksize, user_id=user_id, project_id=project_id, run_as_root=run_as_root, volume_subformat=out_subformat) self.assertIsNone(output) mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id, dest, 'vpc', blocksize, volume_subformat=out_subformat, user_id=user_id, project_id=project_id, run_as_root=run_as_root) class TestFetchToRaw(test.TestCase): @mock.patch('cinder.image.image_utils.fetch_to_volume_format') def test_defaults(self, mock_fetch_to): ctxt = mock.sentinel.context image_service = mock.sentinel.image_service image_id = mock.sentinel.image_id 
dest = mock.sentinel.dest blocksize = mock.sentinel.blocksize output = image_utils.fetch_to_raw(ctxt, image_service, image_id, dest, blocksize) self.assertIsNone(output) mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id, dest, 'raw', blocksize, user_id=None, project_id=None, size=None, run_as_root=True) @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.fetch_to_volume_format') def test_kwargs(self, mock_fetch_to, mock_check_space): ctxt = mock.sentinel.context image_service = mock.sentinel.image_service image_id = mock.sentinel.image_id dest = mock.sentinel.dest blocksize = mock.sentinel.blocksize user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = mock.sentinel.size run_as_root = mock.sentinel.run_as_root output = image_utils.fetch_to_raw(ctxt, image_service, image_id, dest, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id, dest, 'raw', blocksize, user_id=user_id, size=size, project_id=project_id, run_as_root=run_as_root) class FakeImageService(object): def __init__(self, db_driver=None, image_service=None, disk_format='raw'): self.temp_images = None self.disk_format = disk_format def show(self, context, image_id): return {'size': 2 * units.Gi, 'disk_format': self.disk_format, 'container_format': 'bare', 'status': 'active'} class TestFetchToVolumeFormat(test.TestCase): @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') 
@mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_defaults(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_space): ctxt = mock.sentinel.context ctxt.user_id = mock.sentinel.user_id image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value output = image_utils.fetch_to_volume_format(ctxt, image_service, image_id, dest, volume_format, blocksize) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=True), mock.call(tmp, run_as_root=True)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, None, None) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=True, src_format='raw') @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_kwargs(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_space, mock_check_size): 
ctxt = mock.sentinel.context image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=run_as_root, src_format='raw') mock_check_size.assert_called_once_with(data.virtual_size, size, image_id) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=True) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_convert_from_vhd(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, 
mock_convert, mock_check_space, mock_check_size): ctxt = mock.sentinel.context image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value image_service = FakeImageService(disk_format='vhd') expect_format = 'vpc' output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) mock_repl_xen.assert_called_once_with(tmp) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=run_as_root, src_format=expect_format) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_convert_from_iso(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_copy, mock_convert, mock_check_space, mock_check_size): ctxt = 
mock.sentinel.context image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value image_service = FakeImageService(disk_format='iso') expect_format = 'raw' output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=run_as_root, src_format=expect_format) @mock.patch('cinder.image.image_utils.check_available_space', new=mock.Mock()) @mock.patch('cinder.image.image_utils.is_xenserver_format', new=mock.Mock(return_value=False)) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_temporary_images(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context ctxt.user_id = mock.sentinel.user_id image_service = 
FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock.sentinel.tmp dummy = mock.sentinel.dummy mock_temp.return_value.__enter__.side_effect = [tmp, dummy] with image_utils.TemporaryImages.fetch(image_service, ctxt, image_id) as tmp_img: self.assertEqual(tmp_img, tmp) output = image_utils.fetch_to_volume_format(ctxt, image_service, image_id, dest, volume_format, blocksize) self.assertIsNone(output) self.assertEqual(2, mock_temp.call_count) mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=True), mock.call(dummy, force_share=False, run_as_root=True), mock.call(tmp, run_as_root=True)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, None, None) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=True, src_format='raw') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_no_qemu_img_and_is_raw(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format 
blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root tmp = mock_temp.return_value.__enter__.return_value image_service.show.return_value = {'disk_format': 'raw', 'size': 41126400} image_size_m = math.ceil(float(41126400) / units.Mi) output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_called_once_with(tmp, force_share=False, run_as_root=run_as_root) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_repl_xen.called) mock_copy.assert_called_once_with(tmp, dest, image_size_m, blocksize) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_no_qemu_img_not_raw(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root tmp = 
mock_temp.return_value.__enter__.return_value image_service.show.return_value = {'disk_format': 'not_raw'} self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_called_once_with(tmp, force_share=False, run_as_root=run_as_root) self.assertFalse(mock_fetch.called) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_no_qemu_img_no_metadata(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root tmp = mock_temp.return_value.__enter__.return_value image_service.show.return_value = None self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) 
image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_called_once_with(tmp, force_share=False, run_as_root=run_as_root) self.assertFalse(mock_fetch.called) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.check_virtual_size') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_size_error(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_size): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 1234 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = int(1234.5 * units.Gi) tmp = mock_temp.return_value.__enter__.return_value mock_check_size.side_effect = exception.ImageUnacceptable( image_id='fake_image_id', reason='test') self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ 
mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_qemu_img_parse_error(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = None data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) 
self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_backing_file_error(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = mock.sentinel.backing_file data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value self.assertRaises( exception.ImageUnacceptable, image_utils.fetch_to_volume_format, ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) self.assertFalse(mock_convert.called) @mock.patch('cinder.image.image_utils.check_virtual_size') 
@mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=True) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_xenserver_to_vhd(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_space, mock_check_size): ctxt = mock.sentinel.context image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format blocksize = mock.sentinel.blocksize ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value output = image_utils.fetch_to_volume_format( ctxt, image_service, image_id, dest, volume_format, blocksize, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=run_as_root), mock.call(tmp, run_as_root=run_as_root)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, user_id, project_id) mock_repl_xen.assert_called_once_with(tmp) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=None, run_as_root=run_as_root, src_format='raw') @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info', 
side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_no_qemu_img_fetch_verify_image(self, mock_conf, mock_temp, mock_info, mock_fetch): ctxt = mock.sentinel.context image_service = mock.Mock(temp_images=None) image_id = mock.sentinel.image_id dest = mock.sentinel.dest ctxt.user_id = user_id = mock.sentinel.user_id project_id = mock.sentinel.project_id size = 4321 run_as_root = mock.sentinel.run_as_root image_service.show.return_value = {'disk_format': 'raw', 'size': 41126400} image_utils.fetch_verify_image( ctxt, image_service, image_id, dest, user_id=user_id, project_id=project_id, size=size, run_as_root=run_as_root) image_service.show.assert_called_once_with(ctxt, image_id) mock_info.assert_called_once_with(dest, force_share=False, run_as_root=run_as_root) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, dest, None, None) @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_get_qemu_data_returns_none(self, mock_conf, mock_temp, mock_info): image_id = mock.sentinel.image_id dest = mock.sentinel.dest run_as_root = mock.sentinel.run_as_root disk_format_raw = True has_meta = True output = image_utils.get_qemu_data(image_id, has_meta, disk_format_raw, dest, run_as_root=run_as_root) self.assertIsNone(output) @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_get_qemu_data_with_image_meta_exception(self, mock_conf, mock_temp, mock_info): image_id = mock.sentinel.image_id dest = mock.sentinel.dest run_as_root = mock.sentinel.run_as_root disk_format_raw = False has_meta = True self.assertRaises( exception.ImageUnacceptable, 
image_utils.get_qemu_data, image_id, has_meta, disk_format_raw, dest, run_as_root=run_as_root) @mock.patch('cinder.image.image_utils.qemu_img_info', side_effect=processutils.ProcessExecutionError) @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_get_qemu_data_without_image_meta_except(self, mock_conf, mock_temp, mock_info): image_id = mock.sentinel.image_id dest = mock.sentinel.dest run_as_root = mock.sentinel.run_as_root disk_format_raw = False has_meta = False self.assertRaises( exception.ImageUnacceptable, image_utils.get_qemu_data, image_id, has_meta, disk_format_raw, dest, run_as_root=run_as_root) @mock.patch('cinder.image.accelerator.is_gzip_compressed', return_value = True) @mock.patch('cinder.image.accelerator.ImageAccel._get_engine') @mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready', return_value = True) @mock.patch('cinder.image.image_utils.check_available_space') @mock.patch('cinder.image.image_utils.convert_image') @mock.patch('cinder.image.image_utils.volume_utils.copy_volume') @mock.patch( 'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd') @mock.patch('cinder.image.image_utils.is_xenserver_format', return_value=False) @mock.patch('cinder.image.image_utils.fetch') @mock.patch('cinder.image.image_utils.qemu_img_info') @mock.patch('cinder.image.image_utils.temporary_file') @mock.patch('cinder.image.image_utils.CONF') def test_defaults_compressed(self, mock_conf, mock_temp, mock_info, mock_fetch, mock_is_xen, mock_repl_xen, mock_copy, mock_convert, mock_check_space, mock_engine_ready, mock_get_engine, mock_gzip_compressed): class fakeEngine(object): def __init__(self): pass def decompress_img(self, src, dest, run_as_root): pass class FakeImageService(object): def __init__(self, db_driver=None, image_service=None, disk_format='raw'): self.temp_images = None self.disk_format = disk_format def show(self, context, image_id): return {'size': 2 * units.Gi, 
'disk_format': self.disk_format, 'container_format': 'compressed', 'status': 'active'} ctxt = mock.sentinel.context ctxt.user_id = mock.sentinel.user_id image_service = FakeImageService() image_id = mock.sentinel.image_id dest = mock.sentinel.dest volume_format = mock.sentinel.volume_format out_subformat = None blocksize = mock.sentinel.blocksize data = mock_info.return_value data.file_format = volume_format data.backing_file = None data.virtual_size = 1234 tmp = mock_temp.return_value.__enter__.return_value mock_engine = mock.Mock(spec=fakeEngine) mock_get_engine.return_value = mock_engine output = image_utils.fetch_to_volume_format(ctxt, image_service, image_id, dest, volume_format, blocksize) self.assertIsNone(output) mock_temp.assert_called_once_with() mock_info.assert_has_calls([ mock.call(tmp, force_share=False, run_as_root=True), mock.call(tmp, run_as_root=True)]) mock_fetch.assert_called_once_with(ctxt, image_service, image_id, tmp, None, None) self.assertFalse(mock_repl_xen.called) self.assertFalse(mock_copy.called) mock_convert.assert_called_once_with(tmp, dest, volume_format, out_subformat=out_subformat, run_as_root=True, src_format='raw') mock_engine.decompress_img.assert_called() class TestXenserverUtils(test.TestCase): def test_is_xenserver_format(self): image_meta1 = {'disk_format': 'vhd', 'container_format': 'ovf'} self.assertTrue(image_utils.is_xenserver_format(image_meta1)) image_meta2 = {'disk_format': 'test_disk_format', 'container_format': 'test_cont_format'} self.assertFalse(image_utils.is_xenserver_format(image_meta2)) @mock.patch('cinder.image.image_utils.utils.execute') def test_extract_targz(self, mock_exec): name = mock.sentinel.archive_name target = mock.sentinel.target output = image_utils.extract_targz(name, target) mock_exec.assert_called_once_with('tar', '-xzf', name, '-C', target) self.assertIsNone(output) class TestVhdUtils(test.TestCase): @mock.patch('cinder.image.image_utils.utils.execute') def test_set_vhd_parent(self, 
mock_exec): vhd_path = mock.sentinel.vhd_path parentpath = mock.sentinel.parentpath output = image_utils.set_vhd_parent(vhd_path, parentpath) mock_exec.assert_called_once_with('vhd-util', 'modify', '-n', vhd_path, '-p', parentpath) self.assertIsNone(output) @mock.patch('cinder.image.image_utils.set_vhd_parent') def test_fix_vhd_chain(self, mock_set_parent): vhd_chain = (mock.sentinel.first, mock.sentinel.second, mock.sentinel.third, mock.sentinel.fourth, mock.sentinel.fifth) output = image_utils.fix_vhd_chain(vhd_chain) self.assertIsNone(output) mock_set_parent.assert_has_calls([ mock.call(mock.sentinel.first, mock.sentinel.second), mock.call(mock.sentinel.second, mock.sentinel.third), mock.call(mock.sentinel.third, mock.sentinel.fourth), mock.call(mock.sentinel.fourth, mock.sentinel.fifth)]) @mock.patch('cinder.image.image_utils.utils.execute', return_value=(98765.43210, mock.sentinel.error)) def test_get_vhd_size(self, mock_exec): vhd_path = mock.sentinel.vhd_path output = image_utils.get_vhd_size(vhd_path) mock_exec.assert_called_once_with('vhd-util', 'query', '-n', vhd_path, '-v') self.assertEqual(98765, output) @mock.patch('cinder.image.image_utils.utils.execute') def test_resize_vhd(self, mock_exec): vhd_path = mock.sentinel.vhd_path size = 387549349 journal = mock.sentinel.journal output = image_utils.resize_vhd(vhd_path, size, journal) self.assertIsNone(output) mock_exec.assert_called_once_with('vhd-util', 'resize', '-n', vhd_path, '-s', str(size), '-j', journal) @mock.patch('cinder.image.image_utils.utils.execute') def test_coalesce_vhd(self, mock_exec): vhd_path = mock.sentinel.vhd_path output = image_utils.coalesce_vhd(vhd_path) self.assertIsNone(output) mock_exec.assert_called_once_with('vhd-util', 'coalesce', '-n', vhd_path) @mock.patch('cinder.image.image_utils.temporary_dir') @mock.patch('cinder.image.image_utils.coalesce_vhd') @mock.patch('cinder.image.image_utils.resize_vhd') @mock.patch('cinder.image.image_utils.get_vhd_size') 
@mock.patch('cinder.image.image_utils.utils.execute') def test_coalesce_chain(self, mock_exec, mock_size, mock_resize, mock_coal, mock_temp): vhd_chain = (mock.sentinel.first, mock.sentinel.second, mock.sentinel.third, mock.sentinel.fourth, mock.sentinel.fifth) mock_temp.return_value.__enter__.return_value = 'fake_temp_dir' output = image_utils.coalesce_chain(vhd_chain) self.assertEqual(mock.sentinel.fifth, output) mock_size.assert_has_calls([ mock.call(mock.sentinel.first), mock.call(mock.sentinel.second), mock.call(mock.sentinel.third), mock.call(mock.sentinel.fourth)]) mock_resize.assert_has_calls([ mock.call(mock.sentinel.second, mock_size.return_value, mock.ANY), mock.call(mock.sentinel.third, mock_size.return_value, mock.ANY), mock.call(mock.sentinel.fourth, mock_size.return_value, mock.ANY), mock.call(mock.sentinel.fifth, mock_size.return_value, mock.ANY)]) mock_coal.assert_has_calls([ mock.call(mock.sentinel.first), mock.call(mock.sentinel.second), mock.call(mock.sentinel.third), mock.call(mock.sentinel.fourth)]) @mock.patch('cinder.image.image_utils.os.path') def test_discover_vhd_chain(self, mock_path): directory = '/some/test/directory' mock_path.join.side_effect = lambda x, y: '/'.join((x, y)) mock_path.exists.side_effect = (True, True, True, False) output = image_utils.discover_vhd_chain(directory) expected_output = ['/some/test/directory/0.vhd', '/some/test/directory/1.vhd', '/some/test/directory/2.vhd'] self.assertEqual(expected_output, output) @mock.patch('cinder.image.image_utils.temporary_dir') @mock.patch('cinder.image.image_utils.os.rename') @mock.patch('cinder.image.image_utils.fileutils.delete_if_exists') @mock.patch('cinder.image.image_utils.coalesce_chain') @mock.patch('cinder.image.image_utils.fix_vhd_chain') @mock.patch('cinder.image.image_utils.discover_vhd_chain') @mock.patch('cinder.image.image_utils.extract_targz') def test_replace_xenserver_image_with_coalesced_vhd( self, mock_targz, mock_discover, mock_fix, mock_coal, mock_delete, 
mock_rename, mock_temp): image_file = mock.sentinel.image_file tmp = mock_temp.return_value.__enter__.return_value output = image_utils.replace_xenserver_image_with_coalesced_vhd( image_file) self.assertIsNone(output) mock_targz.assert_called_once_with(image_file, tmp) mock_discover.assert_called_once_with(tmp) mock_fix.assert_called_once_with(mock_discover.return_value) mock_coal.assert_called_once_with(mock_discover.return_value) mock_delete.assert_called_once_with(image_file) mock_rename.assert_called_once_with(mock_coal.return_value, image_file) class TestCreateTemporaryFile(test.TestCase): @mock.patch('cinder.image.image_utils.os.close') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.makedirs') @mock.patch('cinder.image.image_utils.tempfile.mkstemp') def test_create_temporary_file_no_dir(self, mock_mkstemp, mock_dirs, mock_conf, mock_close): mock_conf.image_conversion_dir = None fd = mock.sentinel.file_descriptor path = mock.sentinel.absolute_pathname mock_mkstemp.return_value = (fd, path) output = image_utils.create_temporary_file() self.assertEqual(path, output) mock_mkstemp.assert_called_once_with(dir=None) mock_close.assert_called_once_with(fd) @mock.patch('cinder.image.image_utils.os.close') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.makedirs') @mock.patch('cinder.image.image_utils.tempfile.mkstemp') def test_create_temporary_file_with_dir(self, mock_mkstemp, mock_dirs, mock_conf, mock_close): conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir fd = mock.sentinel.file_descriptor path = mock.sentinel.absolute_pathname mock_mkstemp.return_value = (fd, path) output = image_utils.create_temporary_file() self.assertEqual(path, output) self.assertTrue(mock_dirs.called) mock_mkstemp.assert_called_once_with(dir=conv_dir) mock_close.assert_called_once_with(fd) @mock.patch('cinder.image.image_utils.os.close') 
@mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.fileutils.ensure_tree') @mock.patch('cinder.image.image_utils.tempfile.mkstemp') def test_create_temporary_file_and_dir(self, mock_mkstemp, mock_dirs, mock_conf, mock_close): conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir fd = mock.sentinel.file_descriptor path = mock.sentinel.absolute_pathname mock_mkstemp.return_value = (fd, path) output = image_utils.create_temporary_file() self.assertEqual(path, output) mock_dirs.assert_called_once_with(conv_dir) mock_mkstemp.assert_called_once_with(dir=conv_dir) mock_close.assert_called_once_with(fd) @mock.patch('cinder.image.image_utils.os.remove') @mock.patch('cinder.image.image_utils.os.path.join') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.listdir') @mock.patch('cinder.image.image_utils.os.path.exists', return_value=True) def test_cleanup_temporary_file(self, mock_path, mock_listdir, mock_conf, mock_join, mock_remove): mock_listdir.return_value = ['tmphost@backend1', 'tmphost@backend2'] conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir mock_join.return_value = '/test/tmp/tmphost@backend1' image_utils.cleanup_temporary_file('host@backend1') mock_listdir.assert_called_once_with(conv_dir) mock_remove.assert_called_once_with('/test/tmp/tmphost@backend1') @mock.patch('cinder.image.image_utils.os.remove') @mock.patch('cinder.image.image_utils.os.listdir') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.path.exists', return_value=False) def test_cleanup_temporary_file_with_not_exist_path(self, mock_path, mock_conf, mock_listdir, mock_remove): conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir image_utils.cleanup_temporary_file('host@backend1') self.assertFalse(mock_listdir.called) self.assertFalse(mock_remove.called) 
@mock.patch('cinder.image.image_utils.os.remove') @mock.patch('cinder.image.image_utils.os.path.join') @mock.patch('cinder.image.image_utils.CONF') @mock.patch('cinder.image.image_utils.os.listdir') @mock.patch('cinder.image.image_utils.os.path.exists', return_value=True) def test_cleanup_temporary_file_with_exception(self, mock_path, mock_listdir, mock_conf, mock_join, mock_remove): mock_listdir.return_value = ['tmphost@backend1', 'tmphost@backend2'] conv_dir = mock.sentinel.image_conversion_dir mock_conf.image_conversion_dir = conv_dir mock_join.return_value = '/test/tmp/tmphost@backend1' mock_remove.side_effect = OSError image_utils.cleanup_temporary_file('host@backend1') mock_listdir.assert_called_once_with(conv_dir) mock_remove.assert_called_once_with('/test/tmp/tmphost@backend1') class TestTemporaryFileContextManager(test.TestCase): @mock.patch('cinder.image.image_utils.create_temporary_file', return_value=mock.sentinel.temporary_file) @mock.patch('cinder.image.image_utils.fileutils.delete_if_exists') def test_temporary_file(self, mock_delete, mock_create): with image_utils.temporary_file() as tmp_file: self.assertEqual(mock.sentinel.temporary_file, tmp_file) self.assertFalse(mock_delete.called) mock_delete.assert_called_once_with(mock.sentinel.temporary_file) class TestImageUtils(test.TestCase): def test_get_virtual_size(self): image_id = fake.IMAGE_ID virtual_size = 1073741824 volume_size = 2 virt_size = image_utils.check_virtual_size(virtual_size, volume_size, image_id) self.assertEqual(1, virt_size) def test_get_bigger_virtual_size(self): image_id = fake.IMAGE_ID virtual_size = 3221225472 volume_size = 2 self.assertRaises(exception.ImageUnacceptable, image_utils.check_virtual_size, virtual_size, volume_size, image_id) def test_decode_cipher(self): expected = {'cipher_alg': 'aes-256', 'cipher_mode': 'xts', 'ivgen_alg': 'essiv'} result = image_utils.decode_cipher('aes-xts-essiv', 256) self.assertEqual(expected, result)
true
true
f7040ee5ec86a17f46a2c70c781f165631fe9b72
1,119
py
Python
src/models/model.py
schibsen/MLops_exercises_organized
2c9b386fed7b1e400524905cb68f220caf9d015b
[ "MIT" ]
null
null
null
src/models/model.py
schibsen/MLops_exercises_organized
2c9b386fed7b1e400524905cb68f220caf9d015b
[ "MIT" ]
null
null
null
src/models/model.py
schibsen/MLops_exercises_organized
2c9b386fed7b1e400524905cb68f220caf9d015b
[ "MIT" ]
null
null
null
import torch import torch.nn as nn import torch.nn.functional as F class MyAwesomeModel(nn.Module): def __init__(self, n_classes): super(MyAwesomeModel, self).__init__() self.feature_extractor = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=6, kernel_size=4, stride=1), nn.Tanh(), nn.AvgPool2d(kernel_size=2), nn.Conv2d(in_channels=6, out_channels=16, kernel_size=4, stride=1), nn.Tanh(), nn.AvgPool2d(kernel_size=2), nn.Conv2d(in_channels=16, out_channels=120, kernel_size=4, stride=1), nn.Tanh(), ) self.classifier = nn.Sequential( nn.Linear(in_features=120, out_features=84), nn.Tanh(), nn.Linear(in_features=84, out_features=n_classes), ) def forward(self, x, return_features=False): x = self.feature_extractor(x) x = torch.flatten(x, 1) logits = self.classifier(x) probs = F.log_softmax(logits, dim=1) if return_features: return x else: return probs
31.083333
81
0.592493
import torch import torch.nn as nn import torch.nn.functional as F class MyAwesomeModel(nn.Module): def __init__(self, n_classes): super(MyAwesomeModel, self).__init__() self.feature_extractor = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=6, kernel_size=4, stride=1), nn.Tanh(), nn.AvgPool2d(kernel_size=2), nn.Conv2d(in_channels=6, out_channels=16, kernel_size=4, stride=1), nn.Tanh(), nn.AvgPool2d(kernel_size=2), nn.Conv2d(in_channels=16, out_channels=120, kernel_size=4, stride=1), nn.Tanh(), ) self.classifier = nn.Sequential( nn.Linear(in_features=120, out_features=84), nn.Tanh(), nn.Linear(in_features=84, out_features=n_classes), ) def forward(self, x, return_features=False): x = self.feature_extractor(x) x = torch.flatten(x, 1) logits = self.classifier(x) probs = F.log_softmax(logits, dim=1) if return_features: return x else: return probs
true
true
f704100c34d2b9a3829c69e2437756a4a7bef023
403
py
Python
tests/year_2021/test_day_2021_01.py
gansanay/adventofcode
2ef8a50d9d8805ff780271559d43a9036a38f041
[ "MIT" ]
null
null
null
tests/year_2021/test_day_2021_01.py
gansanay/adventofcode
2ef8a50d9d8805ff780271559d43a9036a38f041
[ "MIT" ]
null
null
null
tests/year_2021/test_day_2021_01.py
gansanay/adventofcode
2ef8a50d9d8805ff780271559d43a9036a38f041
[ "MIT" ]
null
null
null
from adventofcode.year_2021.day_2021_01 import readable, short def test_readable_part_one(): answer = readable.part1() assert answer == 1616 def test_readable_part_two(): answer = readable.part2() assert answer == 1645 def test_short_part_one(): answer = short.part1() assert answer == 1616 def test_short_part_two(): answer = short.part2() assert answer == 1645
18.318182
62
0.702233
from adventofcode.year_2021.day_2021_01 import readable, short def test_readable_part_one(): answer = readable.part1() assert answer == 1616 def test_readable_part_two(): answer = readable.part2() assert answer == 1645 def test_short_part_one(): answer = short.part1() assert answer == 1616 def test_short_part_two(): answer = short.part2() assert answer == 1645
true
true
f70410f3a9b940b89f700e9a61dff38a26375e5f
808
py
Python
djangoecommerce/urls.py
ijohnnysa/djangoecommerce
53b72915bf3f36fb7e75fbd8caa87d08d860b8d5
[ "MIT" ]
null
null
null
djangoecommerce/urls.py
ijohnnysa/djangoecommerce
53b72915bf3f36fb7e75fbd8caa87d08d860b8d5
[ "MIT" ]
null
null
null
djangoecommerce/urls.py
ijohnnysa/djangoecommerce
53b72915bf3f36fb7e75fbd8caa87d08d860b8d5
[ "MIT" ]
null
null
null
"""djangoecommerce URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/3.1/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.urls import path from core.views import index urlpatterns = [ path('', index), path('admin/', admin.site.urls), ]
32.32
77
0.707921
from django.contrib import admin from django.urls import path from core.views import index urlpatterns = [ path('', index), path('admin/', admin.site.urls), ]
true
true
f7041307e33cd1201a9c5c859e8fdf222c915063
10,525
py
Python
stable_baselines/cmaes/cma_redo.py
hugerepo-tianhang/low_dim_update_stable
565f6cbf886d266d0633bc112ccae28f1d116ee1
[ "MIT" ]
null
null
null
stable_baselines/cmaes/cma_redo.py
hugerepo-tianhang/low_dim_update_stable
565f6cbf886d266d0633bc112ccae28f1d116ee1
[ "MIT" ]
null
null
null
stable_baselines/cmaes/cma_redo.py
hugerepo-tianhang/low_dim_update_stable
565f6cbf886d266d0633bc112ccae28f1d116ee1
[ "MIT" ]
null
null
null
from stable_baselines.ppo2.run_mujoco import eval_return import cma import numpy as np from stable_baselines.low_dim_analysis.eval_util import * from stable_baselines.low_dim_analysis.common import do_pca, plot_2d, \ dump_rows_write_csv, generate_run_dir, do_proj_on_first_n_IPCA, get_allinone_concat_df from sklearn.decomposition import IncrementalPCA from stable_baselines import logger import pandas as pd from sklearn.decomposition import PCA from joblib import Parallel, delayed from matplotlib import pyplot as plt import time import os from stable_baselines.common.cmd_util import mujoco_arg_parser from stable_baselines.low_dim_analysis.common_parser import get_common_parser from numpy import linalg as LA def plot_cma_returns(plot_dir_alg, name, mean_rets, min_rets, max_rets, show): X = np.arange(len(mean_rets)) fig, ax = plt.subplots() plt.xlabel('num of eval') plt.ylabel('mean returns with min and max filled') ax.plot(X, mean_rets) ax.fill_between(X, min_rets, max_rets, alpha=0.5) file_path = f"{plot_dir_alg}/{name}.pdf" if os.path.isfile(file_path): os.remove(file_path) logger.log(f"saving cma plot to {file_path}") fig.savefig(file_path, dpi=300, bbox_inches='tight', format='pdf') if show: plt.show() def do_cma(cma_args, first_n_pcs, orgin_param, save_dir, starting_coord, var): tic = time.time() #TODO better starting locations, record how many samples, logger.log(f"CMAES STARTING :{starting_coord}") es = cma.CMAEvolutionStrategy(starting_coord, var) total_num_of_evals = 0 total_num_timesteps = 0 mean_rets = [] min_rets = [] max_rets = [] eval_returns = None optimization_path = [] while total_num_timesteps < cma_args.cma_num_timesteps and not es.stop(): solutions = es.ask() optimization_path.extend(solutions) thetas = [np.matmul(coord, first_n_pcs) + orgin_param for coord in solutions] logger.log(f"current time steps num: {total_num_timesteps} total time steps: {cma_args.cma_num_timesteps}") eval_returns = Parallel(n_jobs=cma_args.cores_to_use) \ 
(delayed(eval_return)(cma_args, save_dir, theta, cma_args.eval_num_timesteps, i) for (i, theta) in enumerate(thetas)) mean_rets.append(np.mean(eval_returns)) min_rets.append(np.min(eval_returns)) max_rets.append(np.max(eval_returns)) total_num_of_evals += len(eval_returns) total_num_timesteps += cma_args.eval_num_timesteps * len(eval_returns) logger.log(f"current eval returns: {str(eval_returns)}") logger.log(f"total timesteps so far: {total_num_timesteps}") negative_eval_returns = [-r for r in eval_returns] es.tell(solutions, negative_eval_returns) es.logger.add() # write data to disc to be plotted es.disp() toc = time.time() logger.log(f"####################################CMA took {toc-tic} seconds") es_logger = es.logger if not hasattr(es_logger, 'xmean'): es_logger.load() n_comp_used = first_n_pcs.shape[0] optimization_path_mean = np.vstack((starting_coord, es_logger.xmean[:,5:5+n_comp_used])) return mean_rets, min_rets, max_rets, np.array(optimization_path), np.array(optimization_path_mean) def main(): import sys logger.log(sys.argv) common_arg_parser = get_common_parser() cma_args, cma_unknown_args = common_arg_parser.parse_known_args() origin = "mean_param" this_run_dir = get_dir_path_for_this_run(cma_args) traj_params_dir_name = get_full_params_dir(this_run_dir) intermediate_data_dir = get_intermediate_data_dir(this_run_dir) save_dir = get_save_dir( this_run_dir) if not os.path.exists(intermediate_data_dir): os.makedirs(intermediate_data_dir) cma_run_num, cma_intermediate_data_dir = generate_run_dir(get_cma_returns_dirname, intermediate_dir=intermediate_data_dir, n_comp=cma_args.n_comp_to_use) ''' ========================================================================================== get the pc vectors ========================================================================================== ''' logger.log("grab final params") final_file = get_full_param_traj_file_path(traj_params_dir_name, "final") final_param = pd.read_csv(final_file, 
header=None).values[0] final_pca = IncrementalPCA(n_components=2) # for sparse PCA to speed up theta_file = get_full_param_traj_file_path(traj_params_dir_name, 0) concat_df = pd.read_csv(theta_file, header=None, chunksize=10000) tic = time.time() for chunk in concat_df: logger.log(f"currnet at : {concat_df._currow}") if chunk.shape[0] < 2: logger.log(f"last column too few: {chunk.shape[0]}") continue final_pca.partial_fit(chunk.values) toc = time.time() logger.log('\nElapsed time computing the chunked PCA {:.2f} s\n' .format(toc - tic)) logger.log(final_pca.explained_variance_ratio_) pcs_components = final_pca.components_ first_2_pcs = pcs_components[:2] mean_param = final_pca.mean_ origin_param = mean_param theta_file = get_full_param_traj_file_path(traj_params_dir_name, 0) concat_df = pd.read_csv(theta_file, header=None, chunksize=10000) proj_coords = do_proj_on_first_n_IPCA(concat_df, first_2_pcs, origin_param) ''' ========================================================================================== eval all xy coords ========================================================================================== ''' from stable_baselines.low_dim_analysis.common import plot_contour_trajectory, gen_subspace_coords,do_eval_returns, \ get_allinone_concat_df, do_proj_on_first_n from stable_baselines.ppo2.run_mujoco import eval_return last_proj_coord = do_proj_on_first_n(final_param, first_2_pcs, origin_param) starting_coord = last_proj_coord tic = time.time() #TODO better starting locations, record how many samples, logger.log(f"CMAES STARTING :{starting_coord}") es = cma.CMAEvolutionStrategy(starting_coord, 5) total_num_of_evals = 0 total_num_timesteps = 0 mean_rets = [] min_rets = [] max_rets = [] eval_returns = None optimization_path = [] while total_num_timesteps < cma_args.cma_num_timesteps and not es.stop(): solutions = es.ask() optimization_path.extend(solutions) thetas = [np.matmul(coord, first_2_pcs) + origin_param for coord in solutions] 
logger.log(f"current time steps num: {total_num_timesteps} total time steps: {cma_args.cma_num_timesteps}") eval_returns = Parallel(n_jobs=cma_args.cores_to_use) \ (delayed(eval_return)(cma_args, save_dir, theta, cma_args.eval_num_timesteps, i) for (i, theta) in enumerate(thetas)) mean_rets.append(np.mean(eval_returns)) min_rets.append(np.min(eval_returns)) max_rets.append(np.max(eval_returns)) total_num_of_evals += len(eval_returns) total_num_timesteps += cma_args.eval_num_timesteps * len(eval_returns) logger.log(f"current eval returns: {str(eval_returns)}") logger.log(f"total timesteps so far: {total_num_timesteps}") negative_eval_returns = [-r for r in eval_returns] es.tell(solutions, negative_eval_returns) es.logger.add() # write data to disc to be plotted es.disp() toc = time.time() logger.log(f"####################################CMA took {toc-tic} seconds") es_logger = es.logger if not hasattr(es_logger, 'xmean'): es_logger.load() n_comp_used = first_2_pcs.shape[0] optimization_path_mean = np.vstack((starting_coord, es_logger.xmean[:,5:5+n_comp_used])) dump_rows_write_csv(cma_intermediate_data_dir, optimization_path_mean, "opt_mean_path") plot_dir = get_plot_dir(cma_args) cma_plot_dir = get_cma_plot_dir(plot_dir, cma_args.n_comp_to_use, cma_run_num, origin=origin) if not os.path.exists(cma_plot_dir): os.makedirs(cma_plot_dir) ret_plot_name = f"cma return on {cma_args.n_comp_to_use} dim space of real pca plane, " \ f"explained {np.sum(final_pca.explained_variance_ratio_[:2])}" plot_cma_returns(cma_plot_dir, ret_plot_name, mean_rets, min_rets, max_rets, show=False) assert proj_coords.shape[1] == 2 xcoordinates_to_eval, ycoordinates_to_eval = gen_subspace_coords(cma_args, np.vstack((proj_coords, optimization_path_mean)).T) from stable_baselines.ppo2.run_mujoco import eval_return thetas_to_eval = [origin_param + x * first_2_pcs[0] + y * first_2_pcs[1] for y in ycoordinates_to_eval for x in xcoordinates_to_eval] tic = time.time() eval_returns = 
Parallel(n_jobs=-1, max_nbytes='100M') \ (delayed(eval_return)(cma_args, save_dir, theta, cma_args.eval_num_timesteps, i) for (i, theta) in enumerate(thetas_to_eval)) toc = time.time() logger.log(f"####################################1st version took {toc-tic} seconds") plot_contour_trajectory(cma_plot_dir, f"cma redo___{origin}_origin_eval_return_contour_plot", xcoordinates_to_eval, ycoordinates_to_eval, eval_returns, proj_coords[:, 0], proj_coords[:, 1], final_pca.explained_variance_ratio_, num_levels=25, show=False, sub_alg_path=optimization_path_mean.T) opt_mean_path_in_old_basis = [mean_projected_param.dot(first_2_pcs) + mean_param for mean_projected_param in optimization_path_mean] distance_to_final = [LA.norm(opt_mean - final_param, ord=2) for opt_mean in opt_mean_path_in_old_basis] distance_to_final_plot_name = f"cma redo distance_to_final over generations " plot_2d(cma_plot_dir, distance_to_final_plot_name, np.arange(len(distance_to_final)), distance_to_final, "num generation", "distance_to_final", False) # plot_3d_trajectory(cma_plot_dir, "end_point_origin_eval_return_3d_plot", xcoordinates_to_eval, ycoordinates_to_eval, # eval_returns, proj_xcoord, proj_ycoord, # result["explained_variance_ratio"][:2], # num_levels=15, show=False) if __name__ == '__main__': main() #TODO Give filenames more info to identify which hyperparameter is the data for
34.966777
157
0.67924
from stable_baselines.ppo2.run_mujoco import eval_return import cma import numpy as np from stable_baselines.low_dim_analysis.eval_util import * from stable_baselines.low_dim_analysis.common import do_pca, plot_2d, \ dump_rows_write_csv, generate_run_dir, do_proj_on_first_n_IPCA, get_allinone_concat_df from sklearn.decomposition import IncrementalPCA from stable_baselines import logger import pandas as pd from sklearn.decomposition import PCA from joblib import Parallel, delayed from matplotlib import pyplot as plt import time import os from stable_baselines.common.cmd_util import mujoco_arg_parser from stable_baselines.low_dim_analysis.common_parser import get_common_parser from numpy import linalg as LA def plot_cma_returns(plot_dir_alg, name, mean_rets, min_rets, max_rets, show): X = np.arange(len(mean_rets)) fig, ax = plt.subplots() plt.xlabel('num of eval') plt.ylabel('mean returns with min and max filled') ax.plot(X, mean_rets) ax.fill_between(X, min_rets, max_rets, alpha=0.5) file_path = f"{plot_dir_alg}/{name}.pdf" if os.path.isfile(file_path): os.remove(file_path) logger.log(f"saving cma plot to {file_path}") fig.savefig(file_path, dpi=300, bbox_inches='tight', format='pdf') if show: plt.show() def do_cma(cma_args, first_n_pcs, orgin_param, save_dir, starting_coord, var): tic = time.time() logger.log(f"CMAES STARTING :{starting_coord}") es = cma.CMAEvolutionStrategy(starting_coord, var) total_num_of_evals = 0 total_num_timesteps = 0 mean_rets = [] min_rets = [] max_rets = [] eval_returns = None optimization_path = [] while total_num_timesteps < cma_args.cma_num_timesteps and not es.stop(): solutions = es.ask() optimization_path.extend(solutions) thetas = [np.matmul(coord, first_n_pcs) + orgin_param for coord in solutions] logger.log(f"current time steps num: {total_num_timesteps} total time steps: {cma_args.cma_num_timesteps}") eval_returns = Parallel(n_jobs=cma_args.cores_to_use) \ (delayed(eval_return)(cma_args, save_dir, theta, 
cma_args.eval_num_timesteps, i) for (i, theta) in enumerate(thetas)) mean_rets.append(np.mean(eval_returns)) min_rets.append(np.min(eval_returns)) max_rets.append(np.max(eval_returns)) total_num_of_evals += len(eval_returns) total_num_timesteps += cma_args.eval_num_timesteps * len(eval_returns) logger.log(f"current eval returns: {str(eval_returns)}") logger.log(f"total timesteps so far: {total_num_timesteps}") negative_eval_returns = [-r for r in eval_returns] es.tell(solutions, negative_eval_returns) es.logger.add() es.disp() toc = time.time() logger.log(f"####################################CMA took {toc-tic} seconds") es_logger = es.logger if not hasattr(es_logger, 'xmean'): es_logger.load() n_comp_used = first_n_pcs.shape[0] optimization_path_mean = np.vstack((starting_coord, es_logger.xmean[:,5:5+n_comp_used])) return mean_rets, min_rets, max_rets, np.array(optimization_path), np.array(optimization_path_mean) def main(): import sys logger.log(sys.argv) common_arg_parser = get_common_parser() cma_args, cma_unknown_args = common_arg_parser.parse_known_args() origin = "mean_param" this_run_dir = get_dir_path_for_this_run(cma_args) traj_params_dir_name = get_full_params_dir(this_run_dir) intermediate_data_dir = get_intermediate_data_dir(this_run_dir) save_dir = get_save_dir( this_run_dir) if not os.path.exists(intermediate_data_dir): os.makedirs(intermediate_data_dir) cma_run_num, cma_intermediate_data_dir = generate_run_dir(get_cma_returns_dirname, intermediate_dir=intermediate_data_dir, n_comp=cma_args.n_comp_to_use) logger.log("grab final params") final_file = get_full_param_traj_file_path(traj_params_dir_name, "final") final_param = pd.read_csv(final_file, header=None).values[0] final_pca = IncrementalPCA(n_components=2) theta_file = get_full_param_traj_file_path(traj_params_dir_name, 0) concat_df = pd.read_csv(theta_file, header=None, chunksize=10000) tic = time.time() for chunk in concat_df: logger.log(f"currnet at : {concat_df._currow}") if chunk.shape[0] < 
2: logger.log(f"last column too few: {chunk.shape[0]}") continue final_pca.partial_fit(chunk.values) toc = time.time() logger.log('\nElapsed time computing the chunked PCA {:.2f} s\n' .format(toc - tic)) logger.log(final_pca.explained_variance_ratio_) pcs_components = final_pca.components_ first_2_pcs = pcs_components[:2] mean_param = final_pca.mean_ origin_param = mean_param theta_file = get_full_param_traj_file_path(traj_params_dir_name, 0) concat_df = pd.read_csv(theta_file, header=None, chunksize=10000) proj_coords = do_proj_on_first_n_IPCA(concat_df, first_2_pcs, origin_param) from stable_baselines.low_dim_analysis.common import plot_contour_trajectory, gen_subspace_coords,do_eval_returns, \ get_allinone_concat_df, do_proj_on_first_n from stable_baselines.ppo2.run_mujoco import eval_return last_proj_coord = do_proj_on_first_n(final_param, first_2_pcs, origin_param) starting_coord = last_proj_coord tic = time.time() logger.log(f"CMAES STARTING :{starting_coord}") es = cma.CMAEvolutionStrategy(starting_coord, 5) total_num_of_evals = 0 total_num_timesteps = 0 mean_rets = [] min_rets = [] max_rets = [] eval_returns = None optimization_path = [] while total_num_timesteps < cma_args.cma_num_timesteps and not es.stop(): solutions = es.ask() optimization_path.extend(solutions) thetas = [np.matmul(coord, first_2_pcs) + origin_param for coord in solutions] logger.log(f"current time steps num: {total_num_timesteps} total time steps: {cma_args.cma_num_timesteps}") eval_returns = Parallel(n_jobs=cma_args.cores_to_use) \ (delayed(eval_return)(cma_args, save_dir, theta, cma_args.eval_num_timesteps, i) for (i, theta) in enumerate(thetas)) mean_rets.append(np.mean(eval_returns)) min_rets.append(np.min(eval_returns)) max_rets.append(np.max(eval_returns)) total_num_of_evals += len(eval_returns) total_num_timesteps += cma_args.eval_num_timesteps * len(eval_returns) logger.log(f"current eval returns: {str(eval_returns)}") logger.log(f"total timesteps so far: 
{total_num_timesteps}") negative_eval_returns = [-r for r in eval_returns] es.tell(solutions, negative_eval_returns) es.logger.add() es.disp() toc = time.time() logger.log(f"####################################CMA took {toc-tic} seconds") es_logger = es.logger if not hasattr(es_logger, 'xmean'): es_logger.load() n_comp_used = first_2_pcs.shape[0] optimization_path_mean = np.vstack((starting_coord, es_logger.xmean[:,5:5+n_comp_used])) dump_rows_write_csv(cma_intermediate_data_dir, optimization_path_mean, "opt_mean_path") plot_dir = get_plot_dir(cma_args) cma_plot_dir = get_cma_plot_dir(plot_dir, cma_args.n_comp_to_use, cma_run_num, origin=origin) if not os.path.exists(cma_plot_dir): os.makedirs(cma_plot_dir) ret_plot_name = f"cma return on {cma_args.n_comp_to_use} dim space of real pca plane, " \ f"explained {np.sum(final_pca.explained_variance_ratio_[:2])}" plot_cma_returns(cma_plot_dir, ret_plot_name, mean_rets, min_rets, max_rets, show=False) assert proj_coords.shape[1] == 2 xcoordinates_to_eval, ycoordinates_to_eval = gen_subspace_coords(cma_args, np.vstack((proj_coords, optimization_path_mean)).T) from stable_baselines.ppo2.run_mujoco import eval_return thetas_to_eval = [origin_param + x * first_2_pcs[0] + y * first_2_pcs[1] for y in ycoordinates_to_eval for x in xcoordinates_to_eval] tic = time.time() eval_returns = Parallel(n_jobs=-1, max_nbytes='100M') \ (delayed(eval_return)(cma_args, save_dir, theta, cma_args.eval_num_timesteps, i) for (i, theta) in enumerate(thetas_to_eval)) toc = time.time() logger.log(f"####################################1st version took {toc-tic} seconds") plot_contour_trajectory(cma_plot_dir, f"cma redo___{origin}_origin_eval_return_contour_plot", xcoordinates_to_eval, ycoordinates_to_eval, eval_returns, proj_coords[:, 0], proj_coords[:, 1], final_pca.explained_variance_ratio_, num_levels=25, show=False, sub_alg_path=optimization_path_mean.T) opt_mean_path_in_old_basis = [mean_projected_param.dot(first_2_pcs) + mean_param for 
mean_projected_param in optimization_path_mean] distance_to_final = [LA.norm(opt_mean - final_param, ord=2) for opt_mean in opt_mean_path_in_old_basis] distance_to_final_plot_name = f"cma redo distance_to_final over generations " plot_2d(cma_plot_dir, distance_to_final_plot_name, np.arange(len(distance_to_final)), distance_to_final, "num generation", "distance_to_final", False) if __name__ == '__main__': main()
true
true
f7041309a6dc5d310a94af04643b0d5d9782e963
4,502
py
Python
tests/test_tub_latex_converter.py
pooya-raz/TubLatexMaker
9b9f9803286e6acf2f41ec89f7bc8c98fbd4ba72
[ "MIT" ]
null
null
null
tests/test_tub_latex_converter.py
pooya-raz/TubLatexMaker
9b9f9803286e6acf2f41ec89f7bc8c98fbd4ba72
[ "MIT" ]
null
null
null
tests/test_tub_latex_converter.py
pooya-raz/TubLatexMaker
9b9f9803286e6acf2f41ec89f7bc8c98fbd4ba72
[ "MIT" ]
null
null
null
import tublatexmaker.latex_creater as convert dict_of_entries = { "(Bahth fī) uṣūl al-fiqh": { "displaytitle": "", "exists": "1", "fulltext": "(Bahth fī) uṣūl al-fiqh", "fullurl": "http://144.173.140.108:8080/tub/index.php/(Bahth_f%C4%AB)_u%E1%B9%A3%C5%ABl_al-fiqh", "namespace": 0, "printouts": { "Title (Arabic)": ["بحث في) أصول " "الفقه)"], "Title (transliterated)": ["(Bahth " "fī) " "uṣūl " "al-fiqh"], "Has author(s)": [{"fulltext": "Muḥammad Jawād b. Aḥmad"}], "Death (Hijri)": [1299], "Death (Gregorian)": [{"timestamp": "-2776982400", "raw": "1/1882"}], "Death (Hijri) text": ["13th century"], "Death (Gregorian) text": ["19th century"], }, } } edition = [ { "City": [ { "fulltext": "Qum", "fullurl": "http://144.173.140.108:8080/tub/index.php/Qum", "namespace": 0, "exists": "1", "displaytitle": "Qom", } ], "Edition type": ["Modern print"], "Has a publisher": ["Majmaʿ al-Fikr al-Islāmī"], "Has editor(s)": ["unknown"], "Published edition of title": [ { "fulltext": "al-Fawāʾid al-Ḥāʾiriyya", "fullurl": "http://144.173.140.108:8080/tub/index.php/al-Faw%C4%81%CA%BEid_al-%E1%B8%A4%C4%81%CA%BEiriyya", "namespace": 0, "exists": "1", "displaytitle": "", } ], "Sort title": ["al-Fawaid al-Ḥairiyya"], "Title (Arabic)": ["الفوائد الحائرية"], "Title (transliterated)": ["al-Fawāʾid al-Ḥāʾiriyya"], "Year (Gregorian)": [], "Year (Gregorian) text": [], "Year (Hijri)": [], "Year (Hijri) text": [], "page_name": "Edition:al-Fawāʾid al-Ḥāʾiriyya", }, { "City": [ { "fulltext": "Qum", "fullurl": "http://144.173.140.108:8080/tub/index.php/Qum", "namespace": 0, "exists": "1", "displaytitle": "Qom", } ], "Edition type": ["Modern print"], "Has a publisher": ["Majmaʿ al-Fikr al-Islāmī"], "Has editor(s)": ["unknown"], "Published edition of title": [ { "fulltext": "al-Fawāʾid al-Ḥāʾiriyya", "fullurl": "http://144.173.140.108:8080/tub/index.php/al-Faw%C4%81%CA%BEid_al-%E1%B8%A4%C4%81%CA%BEiriyya", "namespace": 0, "exists": "1", "displaytitle": "", } ], "Sort title": ["al-Fawaid al-Ḥairiyya"], 
"Title (Arabic)": ["الفوائد الحائرية"], "Title (transliterated)": ["al-Fawāʾid al-Ḥāʾiriyya"], "Year (Gregorian)": [], "Year (Gregorian) text": [], "Year (Hijri)": [], "Year (Hijri) text": [], "page_name": "Edition:al-Fawāʾid al-Ḥāʾiriyya (1415/1994)", }, ] def create_expected_latex(transliterated_title: str, arabic_title: str) -> str: return f""" \\item \\textbf{{{transliterated_title}}} {arabic_title} \\newline Muḥammad b. Faraj al-Ḥimyarī al-Najafī \\newline (1059/1649) \\newline \\newline \\textbf{{Description}} \\newline A short one-line description. \\newline \\newline \\textbf{{Principle manuscripts}} \\newline This manuscript \\newline \\newline \\textbf{{Editions}} \\newline This edition. \\newline \\newline \\textbf{{Commentaries}} \\newline This commentary. \\newline """ """ def test_convert_to_entry(): entry_values = list(dict_of_entries.values())[0]["printouts"] expected = create_expected_latex("(Bahth fī) uṣūl al-fiqh", "بحث في) أصول الفقه)") assert convert._make_entry(entry_values) == expected """ def test_create_dates(): entry = { "Death (Hijri)": [1299], "Death (Gregorian)": [{"timestamp": "-2776982400", "raw": "1/1882"}], "Death (Hijri) text": ["13th century"], "Death (Gregorian) text": ["19th century"], } assert convert._create_dates(entry) == "(13th century/19th century)" def test_make_editions(): assert ( convert._make_editions_section(edition) == """ \\textbf{Editions}\n\\begin{itemize} \\item \\end{itemize}\n """ )
30.418919
123
0.506886
import tublatexmaker.latex_creater as convert dict_of_entries = { "(Bahth fī) uṣūl al-fiqh": { "displaytitle": "", "exists": "1", "fulltext": "(Bahth fī) uṣūl al-fiqh", "fullurl": "http://144.173.140.108:8080/tub/index.php/(Bahth_f%C4%AB)_u%E1%B9%A3%C5%ABl_al-fiqh", "namespace": 0, "printouts": { "Title (Arabic)": ["بحث في) أصول " "الفقه)"], "Title (transliterated)": ["(Bahth " "fī) " "uṣūl " "al-fiqh"], "Has author(s)": [{"fulltext": "Muḥammad Jawād b. Aḥmad"}], "Death (Hijri)": [1299], "Death (Gregorian)": [{"timestamp": "-2776982400", "raw": "1/1882"}], "Death (Hijri) text": ["13th century"], "Death (Gregorian) text": ["19th century"], }, } } edition = [ { "City": [ { "fulltext": "Qum", "fullurl": "http://144.173.140.108:8080/tub/index.php/Qum", "namespace": 0, "exists": "1", "displaytitle": "Qom", } ], "Edition type": ["Modern print"], "Has a publisher": ["Majmaʿ al-Fikr al-Islāmī"], "Has editor(s)": ["unknown"], "Published edition of title": [ { "fulltext": "al-Fawāʾid al-Ḥāʾiriyya", "fullurl": "http://144.173.140.108:8080/tub/index.php/al-Faw%C4%81%CA%BEid_al-%E1%B8%A4%C4%81%CA%BEiriyya", "namespace": 0, "exists": "1", "displaytitle": "", } ], "Sort title": ["al-Fawaid al-Ḥairiyya"], "Title (Arabic)": ["الفوائد الحائرية"], "Title (transliterated)": ["al-Fawāʾid al-Ḥāʾiriyya"], "Year (Gregorian)": [], "Year (Gregorian) text": [], "Year (Hijri)": [], "Year (Hijri) text": [], "page_name": "Edition:al-Fawāʾid al-Ḥāʾiriyya", }, { "City": [ { "fulltext": "Qum", "fullurl": "http://144.173.140.108:8080/tub/index.php/Qum", "namespace": 0, "exists": "1", "displaytitle": "Qom", } ], "Edition type": ["Modern print"], "Has a publisher": ["Majmaʿ al-Fikr al-Islāmī"], "Has editor(s)": ["unknown"], "Published edition of title": [ { "fulltext": "al-Fawāʾid al-Ḥāʾiriyya", "fullurl": "http://144.173.140.108:8080/tub/index.php/al-Faw%C4%81%CA%BEid_al-%E1%B8%A4%C4%81%CA%BEiriyya", "namespace": 0, "exists": "1", "displaytitle": "", } ], "Sort title": ["al-Fawaid al-Ḥairiyya"], 
"Title (Arabic)": ["الفوائد الحائرية"], "Title (transliterated)": ["al-Fawāʾid al-Ḥāʾiriyya"], "Year (Gregorian)": [], "Year (Gregorian) text": [], "Year (Hijri)": [], "Year (Hijri) text": [], "page_name": "Edition:al-Fawāʾid al-Ḥāʾiriyya (1415/1994)", }, ] def create_expected_latex(transliterated_title: str, arabic_title: str) -> str: return f""" \\item \\textbf{{{transliterated_title}}} {arabic_title} \\newline Muḥammad b. Faraj al-Ḥimyarī al-Najafī \\newline (1059/1649) \\newline \\newline \\textbf{{Description}} \\newline A short one-line description. \\newline \\newline \\textbf{{Principle manuscripts}} \\newline This manuscript \\newline \\newline \\textbf{{Editions}} \\newline This edition. \\newline \\newline \\textbf{{Commentaries}} \\newline This commentary. \\newline """ def test_create_dates(): entry = { "Death (Hijri)": [1299], "Death (Gregorian)": [{"timestamp": "-2776982400", "raw": "1/1882"}], "Death (Hijri) text": ["13th century"], "Death (Gregorian) text": ["19th century"], } assert convert._create_dates(entry) == "(13th century/19th century)" def test_make_editions(): assert ( convert._make_editions_section(edition) == """ \\textbf{Editions}\n\\begin{itemize} \\item \\end{itemize}\n """ )
true
true
f70413c4323d2f687002c19fdbba7cf96105d340
1,116
py
Python
icclim/user_indices/stat.py
larsbarring/icclim
f3685c77a1a3aaff58b0d05609380c9387e9aa99
[ "Apache-2.0" ]
null
null
null
icclim/user_indices/stat.py
larsbarring/icclim
f3685c77a1a3aaff58b0d05609380c9387e9aa99
[ "Apache-2.0" ]
null
null
null
icclim/user_indices/stat.py
larsbarring/icclim
f3685c77a1a3aaff58b0d05609380c9387e9aa99
[ "Apache-2.0" ]
null
null
null
from typing import Sequence import numpy as np import xarray from xarray import DataArray from xclim.indices.run_length import rle_1d def get_longest_run_start_index( arr: DataArray, window: int = 1, dim: str = "time", ) -> DataArray: return xarray.apply_ufunc( get_index_of_longest_run, arr, input_core_dims=[[dim]], kwargs={"window": window}, vectorize=True, dask="parallelized", output_dtypes=[float], ) def get_index_of_longest_run(arr: Sequence[bool], window: int = 1) -> int: values, rl, pos = rle_1d(arr) if not np.any(values) or np.all(values * rl < window): # type:ignore return 0 index_of_max = np.nanargmax( np.where(values * rl >= window, rl, np.NaN) # type:ignore ) return pos[index_of_max] # type:ignore def get_first_occurrence_index(da: DataArray) -> DataArray: """ Return the index of the first True value in the 3D booleans array along time dimension. """ stacked = da.stack(latlon=("lat", "lon")) res = stacked.argmax("time") return res.unstack()
25.953488
75
0.646057
from typing import Sequence import numpy as np import xarray from xarray import DataArray from xclim.indices.run_length import rle_1d def get_longest_run_start_index( arr: DataArray, window: int = 1, dim: str = "time", ) -> DataArray: return xarray.apply_ufunc( get_index_of_longest_run, arr, input_core_dims=[[dim]], kwargs={"window": window}, vectorize=True, dask="parallelized", output_dtypes=[float], ) def get_index_of_longest_run(arr: Sequence[bool], window: int = 1) -> int: values, rl, pos = rle_1d(arr) if not np.any(values) or np.all(values * rl < window): return 0 index_of_max = np.nanargmax( np.where(values * rl >= window, rl, np.NaN) ) return pos[index_of_max] def get_first_occurrence_index(da: DataArray) -> DataArray: stacked = da.stack(latlon=("lat", "lon")) res = stacked.argmax("time") return res.unstack()
true
true
f70416665959ee4ad95fac5c75f5677bc388a177
419
py
Python
explorer/notes/migrations/0002_auto_20191120_0948.py
UPstartDeveloper/explorer_buddy
467fa77307a588645e7a9fd269ae13b6b24d4efc
[ "MIT" ]
null
null
null
explorer/notes/migrations/0002_auto_20191120_0948.py
UPstartDeveloper/explorer_buddy
467fa77307a588645e7a9fd269ae13b6b24d4efc
[ "MIT" ]
22
2019-12-05T01:10:16.000Z
2022-03-12T00:06:51.000Z
explorer/notes/migrations/0002_auto_20191120_0948.py
UPstartDeveloper/explorer_buddy
467fa77307a588645e7a9fd269ae13b6b24d4efc
[ "MIT" ]
null
null
null
# Generated by Django 2.2.7 on 2019-11-20 17:48 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('notes', '0001_initial'), ] operations = [ migrations.AlterField( model_name='note', name='media', field=models.ImageField(help_text='Optional image to add to note.', upload_to='media'), ), ]
22.052632
99
0.599045
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('notes', '0001_initial'), ] operations = [ migrations.AlterField( model_name='note', name='media', field=models.ImageField(help_text='Optional image to add to note.', upload_to='media'), ), ]
true
true
f70416bbb2afaed37a9719131b77e72a373efab2
3,366
py
Python
lpot/data/dataloaders/sampler.py
daisyden/lpot
d8709bb73ce13cfc0fd760845e0be40af22f5a45
[ "Apache-2.0" ]
null
null
null
lpot/data/dataloaders/sampler.py
daisyden/lpot
d8709bb73ce13cfc0fd760845e0be40af22f5a45
[ "Apache-2.0" ]
null
null
null
lpot/data/dataloaders/sampler.py
daisyden/lpot
d8709bb73ce13cfc0fd760845e0be40af22f5a45
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from abc import abstractmethod class Sampler(object): """Base class for all Samplers. __iter__ is needed no matter whether you use IterableSampler or Squential sampler, if you want implement your own sampler, make clear what the type is your Dataset, if IterableDataset(method __iter__ implemented), try to use IterableSampler, else if you have an IndexDataset(method __getitem__ implemented), your dataset should have method __len__ implemented. """ def __init__(self, data_source): pass @abstractmethod def __iter__(self): raise NotImplementedError class IterableSampler(Sampler): """Interally samples elements, used for datasets retrieved element by interator. yield None to act as a placeholder for each iteration Args: dataset (Dataset): set to None """ def __init__(self): super(IterableSampler, self).__init__(None) def __iter__(self): while True: yield None def __len__(self): return 0 class SequentialSampler(Sampler): """Sequentially samples elements, used for datasets retrieved element by index. 
Args: dataset (Dataset): index dataset(implement method __len__) for sampling """ def __init__(self, dataset): self.dataset = dataset def __iter__(self): return iter(range(len(self.dataset))) def __len__(self): return len(self.dataset) class BatchSampler(Sampler): """yield a mini-batch of indices for SquentialSampler and batch size length of None list for IterableSampler. Args: sampler (Sampler): sampler used for generating batches. batch_size (int): Size of mini-batch. drop_last (bool): BatchSampler will drop the last batch if drop_last is True, else will return the last batch whose size will be less than batch_size """ def __init__(self, sampler, batch_size, drop_last=True): if isinstance(drop_last, bool): self.drop_last = drop_last else: raise ValueError("last_batch only support bool as input") self.sampler = sampler self.batch_size = batch_size self.drop_last = drop_last def __iter__(self): batch = [] for idx in self.sampler: batch.append(idx) if len(batch) == self.batch_size: yield batch batch = [] if len(batch) > 0 and not self.drop_last: yield batch def __len__(self): if self.drop_last: return len(self.sampler) // self.batch_size else: return (len(self.sampler) + self.batch_size - 1) // self.batch_size
30.324324
97
0.663102
from abc import abstractmethod class Sampler(object): def __init__(self, data_source): pass @abstractmethod def __iter__(self): raise NotImplementedError class IterableSampler(Sampler): def __init__(self): super(IterableSampler, self).__init__(None) def __iter__(self): while True: yield None def __len__(self): return 0 class SequentialSampler(Sampler): def __init__(self, dataset): self.dataset = dataset def __iter__(self): return iter(range(len(self.dataset))) def __len__(self): return len(self.dataset) class BatchSampler(Sampler): def __init__(self, sampler, batch_size, drop_last=True): if isinstance(drop_last, bool): self.drop_last = drop_last else: raise ValueError("last_batch only support bool as input") self.sampler = sampler self.batch_size = batch_size self.drop_last = drop_last def __iter__(self): batch = [] for idx in self.sampler: batch.append(idx) if len(batch) == self.batch_size: yield batch batch = [] if len(batch) > 0 and not self.drop_last: yield batch def __len__(self): if self.drop_last: return len(self.sampler) // self.batch_size else: return (len(self.sampler) + self.batch_size - 1) // self.batch_size
true
true
f704175b0bd869defb393f07da425d180b3841da
33,040
py
Python
mytrading/utils/bettingdb.py
joeledwardson/betfair-browser
b641f134e60307250a0e51bafa849422ecf5264b
[ "MIT" ]
3
2021-11-23T19:03:02.000Z
2021-11-24T08:44:23.000Z
mytrading/utils/bettingdb.py
joeledwardson/betfair-browser
b641f134e60307250a0e51bafa849422ecf5264b
[ "MIT" ]
2
2021-11-23T18:47:31.000Z
2021-12-08T15:36:11.000Z
mytrading/utils/bettingdb.py
joeledwardson/betfair-browser
b641f134e60307250a0e51bafa849422ecf5264b
[ "MIT" ]
null
null
null
from __future__ import annotations import shutil from betfairlightweight.resources.streamingresources import MarketDefinition from betfairlightweight.resources.bettingresources import MarketCatalogue, MarketBook from betfairlightweight.streaming.listener import StreamListener import sqlalchemy from sqlalchemy.sql.expression import ColumnElement from sqlalchemy.sql.selectable import CTE from sqlalchemy import create_engine, func, DECIMAL from sqlalchemy.orm import Session from sqlalchemy.sql.schema import Table from sqlalchemy.ext.automap import automap_base from sqlalchemy.dialects.postgresql import base as psqlbase from sqlalchemy.dialects.postgresql import json as psqljson from sqlalchemy.sql.functions import sum as sql_sum from sqlalchemy_filters.filters import Operator as SqlOperator from sqlalchemy.orm.query import Query from queue import Queue import logging from typing import Optional, Dict, List, Callable, Any, Tuple, Union, Literal, TypedDict from os import path import os from datetime import datetime, timedelta import zlib import yaml import json import sys import dateparser from myutils import dictionaries, registrar from ..exceptions import DBException from .dbfilter import DBFilterHandler active_logger = logging.getLogger(__name__) active_logger.setLevel(logging.INFO) ProcessorKey = Literal['process_in', 'process_out', 'processors'] ProcessorMap = Dict[type, Dict[ProcessorKey, List[str]]] Processor = Callable[[Any], Any] db_processors = registrar.Registrar[Processor]() DB_PROCESSORS: ProcessorMap = { psqlbase.BYTEA: { 'process_in': [ 'prc_compress' ], 'process_out': [ 'prc_decompress', ] }, } CACHE_PROCESSORS: ProcessorMap = { psqlbase.BYTEA: { 'process_in': [ 'prc_str_encode', ], 'process_out': [ 'prc_str_decode' ] }, psqlbase.TIMESTAMP: { 'process_in': [ 'prc_dt_from_str', ], 'process_out': [ 'prc_dt_to_str' ] }, psqlbase.INTERVAL: { 'process_in': [ 'prc_td_from_float', ], 'process_out': [ 'prc_td_to_float' ] }, psqljson.JSON: { 'process_in': [ 
'prc_json_decode', ], 'process_out': [ 'prc_json_encode' ] } } @db_processors.register_element def prc_str_to_dt(data): return dateparser.parse(data, settings={'DATE_ORDER': 'DMY'}) # use UK day-month-year instead of US month-day-year @db_processors.register_element def prc_compress(data): return zlib.compress(data) @db_processors.register_element def prc_decompress(data): return zlib.decompress(data) @db_processors.register_element def prc_str_encode(data): return data.encode() @db_processors.register_element def prc_str_decode(data): return data.decode() @db_processors.register_element def prc_td_to_float(data: timedelta): return data.total_seconds() @db_processors.register_element def prc_td_from_float(data): return timedelta(seconds=data) @db_processors.register_element def prc_dt_from_str(data): return datetime.fromisoformat(data) @db_processors.register_element def prc_dt_to_str(data): return data.isoformat() @db_processors.register_element def prc_json_encode(data): return json.dumps(data) @db_processors.register_element def prc_json_decode(data): return json.loads(data) class DBBase: def __init__( self, db_lang=None, db_user=None, db_host=None, db_port=None, db_name=None, db_pwd=None, db_engine=None, col_processors=None, engine_kwargs=None ): self.col_prcs = col_processors or DB_PROCESSORS self.Base = automap_base() engine_kwargs = engine_kwargs or {} # TODO - remove? 
engine_str = f'+{db_engine}' if db_engine else '' url = f'{db_lang}{engine_str}://{db_user}:{db_pwd}@{db_host}:{db_port}/{db_name}' # prioritise engine kwargs if provided - "url" key will override constructed if provided engine_kwargs = {'url': url} | engine_kwargs active_logger.info(f'connecting to database with kwargs:\n{engine_kwargs}') self.engine = create_engine(**engine_kwargs) self.Base.prepare(self.engine, reflect=True) self.session = Session(self.engine) self.tables: Dict[str, Table] = self.Base.metadata.tables active_logger.info(f'tables found: {list(self.tables.keys())}') def _validate_tbl(self, tbl_name: str): if tbl_name not in self.tables: raise DBException(f'error inserting row, table "{tbl_name}" not found in tables') if tbl_name not in self.Base.classes: raise DBException(f'error inserting row, table "{tbl_name}" not found in base') def _validate_cols(self, tbl_name: str, cols: List[str]): for col in cols: if col not in self.tables[tbl_name].columns: raise DBException(f'column "{col}" not found in table "{tbl_name}"') def _validate_pkeys(self, tbl_nm: str, pkey_flts: Dict): tbl_pkeys = tuple(x.name for x in self.tables[tbl_nm].primary_key) flt_pkeys = tuple(pkey_flts.keys()) if tbl_pkeys != flt_pkeys: raise DBException( f'error writing cache, table primary keys "{tbl_pkeys}" does not match specified "{flt_pkeys}"' ) def apply_basic_filters(self, tbl_nm: str, pkey_flts: Dict) -> Query: return self.session.query(self.tables[tbl_nm]).filter( *[self.tables[tbl_nm].columns[k] == v for k, v in pkey_flts.items()] ) def row_exist(self, tbl_nm: str, pkey_flts: Dict) -> bool: """ Determine if row(s) exist in database for a given table """ return self.apply_basic_filters(tbl_nm, pkey_flts).count() >= 1 def _value_processors(self, value: Any, tbl_name: str, col: str, prcs: ProcessorMap, prc_type: ProcessorKey) -> Any: col_type = type(self.tables[tbl_name].columns[col].type) prc_nms = prcs.get(col_type, {}).get(prc_type) if prc_nms: if type(prc_nms) is not 
list: raise DBException(f'processors "{prc_type}" for column "{col}" not list') for i, prc_nm in enumerate(prc_nms): prc_func = db_processors[prc_nm] active_logger.info(f'running processor "{prc_type}" #{i}, "{prc_nm}" on column "{col}"') value_out = prc_func(value) value = value_out return value def _process_columns(self, data: Dict, tbl_name: str, prcs: ProcessorMap, prc_type: ProcessorKey) -> None: self._validate_tbl(tbl_name) self._validate_cols(tbl_name, list(data.keys())) for col in data.keys(): val_in = data[col] if val_in is None: active_logger.warning(f'table "{tbl_name}", col "{col}" value is None, skipping processing') else: val_out = self._value_processors(val_in, tbl_name, col, prcs, prc_type) data[col] = val_out def insert_row(self, tbl_name: str, data: Dict): active_logger.info(f'inserting row of information into table "{tbl_name}"') active_logger.info(f'keys passed are:\n' f'{yaml.dump([str(k) for k in data.keys()])}') self._process_columns(data, tbl_name, self.col_prcs, 'process_in') row = self.Base.classes[tbl_name](**data) self.session.add(row) self.session.commit() def read_rows(self, tbl_nm: str, pkey_flts: Dict) -> List[Dict]: active_logger.info(f'reading rows from table "{tbl_nm}" with filter "{pkey_flts}"') self._validate_tbl(tbl_nm) self._validate_pkeys(tbl_nm, pkey_flts) if not self.row_exist(tbl_nm, pkey_flts): raise DBException(f'row in table "{tbl_nm}" with filters "{pkey_flts}" does not exist') sql_rows = self.apply_basic_filters(tbl_nm, pkey_flts).all() rows = [] for row in sql_rows: row_dict = { str(k): v for k, v in dict(row).items() } # convert sqlalchemy key objects to str for yaml self._process_columns(row_dict, tbl_nm, self.col_prcs, 'process_out') rows.append(row_dict) return rows def read_row(self, tbl_nm: str, pkey_flts: Dict) -> Dict: rows = self.read_rows(tbl_nm, pkey_flts) if len(rows) != 1: raise DBException(f'expected 1 row from table "{tbl_nm}" with filters "{pkey_flts}", got {len(rows)}') return rows[0] def 
delete_rows(self, tbl_nm: str, pkey_flts: Dict) -> int: active_logger.info(f'deleting rows from table "{tbl_nm}" with filters: "{pkey_flts}"') q = self.apply_basic_filters(tbl_nm, pkey_flts) ret = q.delete(synchronize_session='fetch') self.session.commit() return ret def order_query(self, query: Query, cols, order_col: str, order_asc: bool): """apply ordering based on column of cte""" if order_col not in cols: raise DBException(f'cannot order by column "{order_col}", does not exist in CTE') order_func = sqlalchemy.asc if order_asc else sqlalchemy.desc return query.order_by(order_func(cols[order_col])) class DBCache(DBBase): def __init__(self, cache_root, cache_processors=None, **kwargs): super().__init__(**kwargs) self.cache_root = path.abspath(path.expandvars(cache_root)) if not path.isdir(self.cache_root): active_logger.info(f'creating cache root directory at: "{self.cache_root}"') os.makedirs(self.cache_root) else: active_logger.info(f'existing cache root directory found at: "{self.cache_root}"') self.cache_prcs = cache_processors or CACHE_PROCESSORS def cache_tbl(self, tbl_nm) -> str: return path.join(self.cache_root, tbl_nm) def cache_dir(self, tbl_nm: str, pkey_flts: Dict) -> str: return path.join(self.cache_tbl(tbl_nm), *pkey_flts.values()) def cache_col(self, tbl_nm: str, pkey_flts: Dict, col: str) -> str: return path.join(self.cache_dir(tbl_nm, pkey_flts), col) def clear_cache(self, tbl_nm: str, pkey_flts: Dict): active_logger.info(f'clearing cache from table "{tbl_nm}" with filters "{pkey_flts}"') p = self.cache_dir(tbl_nm, pkey_flts) if not path.exists(p): active_logger.info(f'path "{p}" does not exist, skipping') else: if not path.isdir(p): raise DBException(f'path "{p}" is not a directory') active_logger.info(f'removing cache dir: "{p}"') os.rmdir(p) def write_to_cache(self, tbl_nm: str, pkey_flts: Dict, data: Dict): self._validate_pkeys(tbl_nm, pkey_flts) self._validate_tbl(tbl_nm) d = self.cache_dir(tbl_nm, pkey_flts) active_logger.info(f'writing 
cache to path: "{d}"') if path.exists(d): active_logger.info('path already exists, exiting...') return os.makedirs(d, exist_ok=True) self._process_columns(data, tbl_nm, self.cache_prcs, 'process_out') for k in pkey_flts.keys(): data.pop(k, None) for col in data.keys(): if data[col] is None: active_logger.warning(f'column "{col}" value is none, skipping') else: p = self.cache_col(tbl_nm, pkey_flts, col) active_logger.info(f'writing column "{col}" to file: "{p}"') with open(p, 'w') as f: f.write(data[col]) def read_to_cache(self, tbl_nm: str, pkey_flts: Dict): active_logger.info(f'reading table "{tbl_nm}" row to cache with filters "{pkey_flts}"') data = self.read_row(tbl_nm, pkey_flts) self.write_to_cache(tbl_nm, pkey_flts, data) def insert_from_cache(self, tbl_nm, pkey_flts: Dict): active_logger.info(f'insert row to table "{tbl_nm}" from cache with filters "{pkey_flts}"') self._validate_pkeys(tbl_nm, pkey_flts) self._validate_tbl(tbl_nm) d = self.cache_dir(tbl_nm, pkey_flts) active_logger.info(f'getting files from cache directory: "{d}"') if not path.isdir(d): raise DBException(f'expected to be directory: "{d}"') data = pkey_flts.copy() _, _, files = next(os.walk(d)) self._validate_cols(tbl_nm, files) # files should match column names for fnm in files: fp = self.cache_col(tbl_nm, pkey_flts, fnm) active_logger.info(f'reading column data from file: "{fp}"') with open(fp, 'r') as f: data[fnm] = f.read() self._process_columns(data, tbl_nm, self.cache_prcs, 'process_in') self.insert_row(tbl_nm, data) def _cache_pkeys(self, tbl_nm: str): """ get list of primary key filters from nested dirs in cache """ pkey_names = tuple(x.name for x in self.tables[tbl_nm].primary_key) def _get_pkeys(_dir: str, _base_pkey: Dict, _lvl) -> List: if not path.isdir(_dir): return [] _, dirnames, _ = next(os.walk(_dir)) return [_base_pkey | {pkey_names[_lvl]: d} for d in dirnames] lvl = 0 flts = [{}] while lvl < len(pkey_names): flts_out = [] for f in flts: d = self.cache_dir(tbl_nm, f) 
flts_out += _get_pkeys(d, f, lvl) flts = flts_out lvl += 1 return flts def scan_cache(self, tbl_nm: str, post_insert: Optional[Callable[[str, Dict], None]] = None) -> List[Dict]: tbl_root = self.cache_tbl(tbl_nm) active_logger.info(f'scanning for cached rows for table "{tbl_nm}" to insert in "{tbl_root}"') flts = self._cache_pkeys(tbl_nm) added_pkeys = [] for pkey_filters in flts: if self.row_exist(tbl_nm, pkey_filters): active_logger.info(f'row "{pkey_filters}" already exists in database, skipping...') else: self.insert_from_cache(tbl_nm, pkey_filters) added_pkeys.append(pkey_filters) if post_insert is not None: post_insert(tbl_nm, pkey_filters) return added_pkeys def wipe_cache(self) -> Tuple[int, int]: active_logger.info(f'clearing cache root at "{self.cache_root}"') _, dirnames, filenames = next(os.walk(self.cache_root)) for fnm in filenames: p = path.join(self.cache_root, fnm) os.remove(p) for dnm in dirnames: p = path.join(self.cache_root, dnm) shutil.rmtree(p) return len(filenames), len(dirnames) class QueryFilter(TypedDict): value: object field: str op: str def apply_filter_spec(tbl: Table, q: Query, filters_spec: List[QueryFilter]) -> Query: """sqlalchemy_filters `apply_filters` function doesn't work with Sqlalchemy V1.14 so i've bodged it myself until they sort it out""" conditions = [ SqlOperator.OPERATORS[f['op']](tbl.columns[f['field']], f['value']) for f in filters_spec ] return q.filter(*conditions) class BettingDB: """ Betting database handler Manages session that connects to remote SQL ase for querying "Historic" markets to are files downloaded directly from betfair's historical data website "Recorded" markets are files from betfair markets recorded through a python script locally, which are recorded with the accompanying market catalogue file """ def __init__(self, **kwargs): self._dbc = DBCache(**kwargs) def read(self, tbl_nm: str, pkey_flts: Dict): return self._dbc.read_row(tbl_nm, pkey_flts) def close(self): self._dbc.session.close() def 
meta_serialise(self, market_info: Dict) -> None: """run caching serialisation on market information retrieved from 'marketmeta' database""" self._dbc._process_columns(market_info, 'marketmeta', self._dbc.cache_prcs, 'process_out') def meta_de_serialise(self, market_info: Dict) -> None: """run caching de-serialisation on market information that has been serialised""" self._dbc._process_columns(market_info, 'marketmeta', self._dbc.cache_prcs, 'process_in') @staticmethod def get_meta(first_book: MarketBook, cat: MarketCatalogue = None) -> Dict: """ Get metadata corresponding to the "Meta" table in the betting database for a given betfair Market Parameters ---------- first_book : first MarketBook for betfair Market cat : if market is recorded and not historic, this needs to be passed to get venue and runner names Returns dict of metadata ------- """ mktdef: MarketDefinition = first_book.market_definition mktid = first_book.market_id init_time = first_book.publish_time pre_off = mktdef.market_time - init_time metadata = { 'market_id': mktid, 'sport_id': mktdef.event_type_id, 'market_time': mktdef.market_time, 'market_type': mktdef.market_type, 'betting_type': mktdef.betting_type, 'country_code': mktdef.country_code, 'event_id': mktdef.event_id, 'event_name': mktdef.event_name, # historical 'timezone': mktdef.timezone, 'venue': mktdef.venue, 'init_time': init_time, 'pre_off': pre_off, 'format': 'historic', } if cat is not None: metadata['event_name'] = cat.event.name metadata['venue'] = cat.event.venue metadata['format'] = 'recorded' return metadata @staticmethod def get_first_book(file_path: str) -> Optional[MarketBook]: """ read the first line in a historical/streaming file and get the MarketBook parsed object, without reading or processing the rest of the file """ with open(file_path) as f: l = f.readline() q = Queue() # stop it winging about stream latency by using infinity as max latency listener = StreamListener(q, max_latency=sys.float_info.max) 
listener.register_stream(0, 'marketSubscription') listener.on_data(l) return listener.output_queue.get()[0] def insert_market_meta(self, market_id: str): active_logger.info(f'creating metadata database entry for market "{market_id}"') pkey_flts = {'market_id': market_id} self._dbc.read_to_cache('marketstream', pkey_flts) stream_path = self._dbc.cache_col('marketstream', pkey_flts, 'stream_updates') bk = self.get_first_book(stream_path) cat = None cat_path = self._dbc.cache_col('marketstream', pkey_flts, 'catalogue') if path.exists(cat_path): if path.getsize(cat_path): with open(cat_path, 'r') as f: cat_dict = json.loads(f.read()) try: cat = MarketCatalogue(**cat_dict) except TypeError as e: raise DBException(f'failed to create market catalogue: {e}') if cat is None: names = {r.selection_id: r.name for r in bk.market_definition.runners} else: names = {r.selection_id: r.runner_name for r in cat.runners} for runner_id, name in names.items(): active_logger.info(f'creating row for market "{market_id}", runner "{runner_id}", name "{name}"') self._dbc.insert_row('marketrunners', { 'market_id': market_id, 'runner_id': runner_id, 'runner_name': name }) meta_data = self.get_meta(bk, cat) self._dbc.insert_row('marketmeta', meta_data) def insert_strategy_runners(self, pkey_filters, profit_func: Callable[[str], Dict]): p = self._dbc.cache_col('strategyupdates', pkey_filters, 'strategy_updates') if not path.isfile(p): raise DBException(f'expected strategy update file at "{p}"') runner_profits = profit_func(p) for k, v in runner_profits.items(): self._dbc.insert_row('strategyrunners', pkey_filters | { 'runner_id': k, 'profit': v }) def wipe_cache(self) -> Tuple[int, int]: return self._dbc.wipe_cache() def scan_mkt_cache(self) -> List[Dict]: """ scan marketstream cache files - insert into database if not exist and add corresponding marketmeta and runner rows """ def mkt_post_insert(tbl_name, pkey_flts): if tbl_name != 'marketstream': raise DBException(f'expected "marketstream" 
table') self.insert_market_meta(pkey_flts['market_id']) return self._dbc.scan_cache('marketstream', mkt_post_insert) def scan_strat_cache(self, profit_func: Callable[[str], Dict]) -> List[Dict]: """ scan strategy cache files - insert into database if not exist """ def strat_post_insert(tbl_nm, pkey_flts): self.insert_strategy_runners(pkey_flts, profit_func) added_keys = self._dbc.scan_cache('strategymeta') self._dbc.scan_cache('strategyupdates', strat_post_insert) return added_keys def write_strat_info(self, strategy_id, type: str, name: str, exec_time: datetime, info: dict): data = { 'type': type, 'name': name, 'exec_time': exec_time, 'info': info } self._dbc.write_to_cache( tbl_nm='strategymeta', pkey_flts={ 'strategy_id': str(strategy_id) }, data=data ) def path_mkt_usr_updates(self, market_id) -> str: return self._dbc.cache_col( tbl_nm='marketstream', pkey_flts={ 'market_id': market_id }, col='user_data' ) def path_mkt_cat(self, market_id) -> str: return self._dbc.cache_col( tbl_nm='marketstream', pkey_flts={ 'market_id': market_id }, col='catalogue', ) def path_mkt_updates(self, market_id) -> str: return self._dbc.cache_col( tbl_nm='marketstream', pkey_flts={ 'market_id': market_id }, col='stream_updates', ) def path_strat_features(self, market_id, strategy_id) -> str: return self._dbc.cache_col( tbl_nm='strategyupdates', pkey_flts={ 'strategy_id': str(strategy_id), 'market_id': market_id, }, col='strategy_features' ) def path_strat_updates(self, market_id, strategy_id) -> str: return self._dbc.cache_col( tbl_nm='strategyupdates', pkey_flts={ 'strategy_id': str(strategy_id), 'market_id': market_id }, col='strategy_updates' ) def paths_market_updates(self, filter_spec: List[QueryFilter], limit=200): tbl = self._dbc.tables['marketmeta'] q = self._dbc.session.query(tbl) q_flt = apply_filter_spec(tbl, q, filter_spec) rows = q_flt.limit(limit).all() update_paths = [] for row in rows: mkt_flt = {'market_id': row.market_id} self._dbc.read_to_cache('marketstream', 
mkt_flt) p = self._dbc.cache_col('marketstream', mkt_flt, 'stream_updates') if not path.isfile(p): raise DBException(f'expected file at stream update path: "{p}"') update_paths.append(p) return update_paths def rows_runners(self, market_id, strategy_id) -> List[Dict]: """ get filters rows of runners, joined with profit column from strategy """ sr = self._dbc.tables['strategyrunners'] cte_strat = self._dbc.session.query( sr.columns['runner_id'], sr.columns['profit'].label('runner_profit') ).filter( sr.columns['strategy_id'] == strategy_id, sr.columns['market_id'] == market_id ).cte() rn = self._dbc.tables['marketrunners'] rows = self._dbc.session.query( rn, cte_strat.c['runner_profit'], ).join( cte_strat, rn.columns['runner_id'] == cte_strat.c['runner_id'], isouter=True, ).filter( rn.columns['market_id'] == market_id ).all() return [dict(row) for row in rows] def rows_market(self, cte, col_names, max_rows, order_col=None, order_asc=False) -> List[Dict]: cols = [cte.c[nm] for nm in col_names] q = self._dbc.session.query(*cols) if order_col is not None: q = self._dbc.order_query(q, cte.c, order_col, order_asc) rows = q.limit(max_rows).all() return [dict(row) for row in rows] # TODO - implement in UI def rows_strategy(self, max_rows) -> List[Dict]: shn = self._dbc.session sm = self._dbc.tables['strategymeta'] sr = self._dbc.tables['strategyrunners'] p_cte = shn.query( sr.columns['strategy_id'], func.sum(sr.columns['profit']).label('total_profit') ).group_by(sr.columns['strategy_id']).cte() m_cte = shn.query(sr.c['strategy_id'], sr.c['market_id']).distinct().cte() m_cte = shn.query( m_cte.c['strategy_id'], func.count(m_cte.c['market_id']).label('n_markets') ).group_by(m_cte.c['strategy_id']).cte() q = shn.query(sm, p_cte.c['total_profit'], m_cte.c['n_markets']).join( p_cte, sm.c['strategy_id'] == p_cte.c['strategy_id'], isouter=True ).join( m_cte, sm.c['strategy_id'] == m_cte.c['strategy_id'], isouter=True ) return [dict(row) for row in q.limit(max_rows).all()] def 
filters_labels(self, filters: DBFilterHandler, cte) -> List[List[Dict[str, Any]]]: return filters.filters_labels(self._dbc.session, self._dbc.tables, cte) def cte_count(self, cte: CTE) -> int: return self._dbc.session.query(cte).count() def strategy_count(self) -> int: return self._dbc.session.query(self._dbc.tables['strategymeta']).count() def strategy_delete(self, strategy_id) -> Tuple[int, int ,int]: strategy_id = str(strategy_id) active_logger.info(f'attempting to delete strategy: "{strategy_id}"') pkey_flt = {'strategy_id': strategy_id} if not self._dbc.row_exist('strategymeta', pkey_flt): raise DBException(f'strategy does not exist, using filters: "{pkey_flt}"') if not strategy_id: raise DBException(f'trying to delete strategy where ID passed is blank!') rows = self._dbc.read_rows('strategymeta', pkey_flt) if len(rows) != 1: raise DBException(f'expected 1 strategy meta row with filter: "{pkey_flt}"') n_runners = self._dbc.delete_rows('strategyrunners', pkey_flt) active_logger.info(f'deleted {n_runners} rows from "strategyrunners" table') n_mkts = self._dbc.delete_rows('strategyupdates', pkey_flt) active_logger.info(f'deleted {n_mkts} rows from "strategyupdates" table') n_meta = self._dbc.delete_rows('strategymeta', pkey_flt) active_logger.info(f'deleted {n_meta} rows from "strategymeta" table') return n_meta, n_mkts, n_runners def filters_strat_cte(self, strat_filters: DBFilterHandler) -> CTE: """ get filtered database strategy common table expression (CTE) """ strat_meta = self._dbc.tables['strategymeta'] q = self._dbc.session.query(strat_meta).filter( *strat_filters.filters_conditions(strat_meta) ) return q.cte() def filters_mkt_cte(self, strategy_id, column_filters: List[ColumnElement]) -> CTE: meta = self._dbc.tables['marketmeta'] sr = self._dbc.tables['strategyrunners'] if strategy_id: strat_cte = self._dbc.session.query( sr.columns['market_id'], sql_sum(sr.columns['profit']).label('market_profit') ).filter( sr.columns['strategy_id'] == strategy_id 
).group_by( sr.columns['market_id'] ).cte() q = self._dbc.session.query( meta, strat_cte.c['market_profit'] ).join( strat_cte, meta.columns['market_id'] == strat_cte.c['market_id'] ) else: q = self._dbc.session.query( meta, sqlalchemy.null().label('market_profit') ) q = q.filter(*column_filters) return q.cte() def cache_strat_updates(self, strategy_id, market_id): pkey_flts = { 'strategy_id': str(strategy_id), 'market_id': market_id } self._dbc.read_to_cache('strategyupdates', pkey_flts) def cache_strat_meta(self, strategy_id): pkey_flt = {'strategy_id': strategy_id} self._dbc.read_to_cache('strategymeta', pkey_flt) def cache_mkt_stream(self, market_id): pkey_flt = {'market_id': market_id} self._dbc.read_to_cache('marketstream', pkey_flt) def read_mkt_meta(self, market_id) -> Dict: pkey_flt = {'market_id': market_id} return self._dbc.read_row('marketmeta', pkey_flt) def _lost_ids(self, t1: Table, t2, id_col: str): """ get a query for where table `t1` has rows that are not reflected in table `t2`, joined by a column with name specified by `id_col`. table `t2` can be a 1-to-1 mapping of rows from `t1` or 1 to many. E.g. 
if `t1` had an id column of 'sample_id_col' and some values [1,2,3], and `t2` had hundreds of rows but only with 'sample_id_col' equal to 1 or 2, then the function would return the 'sample_id_col' value of 3 """ cte = self._dbc.session.query( t2.columns[id_col] ).group_by(t2.columns[id_col]).cte() return self._dbc.session.query( t1.columns[id_col], cte.c[id_col] ).join( cte, t1.columns[id_col] == cte.c[id_col], isouter=True ).filter(cte.c[id_col] == None) def health_check(self): mkt_stm = self._dbc.tables['marketstream'] mkt_met = self._dbc.tables['marketmeta'] mkt_run = self._dbc.tables['marketrunners'] # market stream/meta row counts n_mkt = self._dbc.session.query(mkt_stm).count() active_logger.info(f'{n_mkt} market stream rows') n_met = self._dbc.session.query(mkt_met).count() active_logger.info(f'{n_met} market meta rows') # market stream rows without corresponding market meta row q = self._lost_ids(mkt_stm, mkt_met, 'market_id') for row in q.all(): active_logger.error(f'market "{row[0]}" does not have a meta row') # market runner meta row count nrun = self._dbc.session.query(mkt_run).count() active_logger.info(f'{nrun} market runner rows') # market stream rows without any corresponding runner rows q = self._lost_ids(mkt_stm, mkt_run, 'market_id') for row in q.all(): active_logger.error(f'market "{row[0]}" does not have any runner rows') srt_met = self._dbc.tables['strategymeta'] srt_run = self._dbc.tables['strategyrunners'] srt_udt = self._dbc.tables['strategyupdates'] # strategy meta & strategy market update row counts n_srtmet = self._dbc.session.query(srt_met).count() active_logger.info(f'{n_srtmet} strategy meta rows found') n_srtudt = self._dbc.session.query(srt_udt).count() active_logger.info(f'{n_srtudt} strategy market update rows found') # strategy meta rows without any strategy update rows q = self._lost_ids(srt_met, srt_udt, 'strategy_id') for row in q.all(): active_logger.error(f'strategy "{row[0]}" does not have any market updates') # strategy 
runner row count n_srtrun = self._dbc.session.query(srt_run).count() active_logger.info(f'{n_srtrun} strategy runner rows found') # strategy meta rows without any strategy runner rows q = self._lost_ids(srt_met, srt_run, 'strategy_id') for row in q.all(): active_logger.error(f'strategy "{row[0]}" does not have any runner rows')
38.553092
122
0.618341
from __future__ import annotations import shutil from betfairlightweight.resources.streamingresources import MarketDefinition from betfairlightweight.resources.bettingresources import MarketCatalogue, MarketBook from betfairlightweight.streaming.listener import StreamListener import sqlalchemy from sqlalchemy.sql.expression import ColumnElement from sqlalchemy.sql.selectable import CTE from sqlalchemy import create_engine, func, DECIMAL from sqlalchemy.orm import Session from sqlalchemy.sql.schema import Table from sqlalchemy.ext.automap import automap_base from sqlalchemy.dialects.postgresql import base as psqlbase from sqlalchemy.dialects.postgresql import json as psqljson from sqlalchemy.sql.functions import sum as sql_sum from sqlalchemy_filters.filters import Operator as SqlOperator from sqlalchemy.orm.query import Query from queue import Queue import logging from typing import Optional, Dict, List, Callable, Any, Tuple, Union, Literal, TypedDict from os import path import os from datetime import datetime, timedelta import zlib import yaml import json import sys import dateparser from myutils import dictionaries, registrar from ..exceptions import DBException from .dbfilter import DBFilterHandler active_logger = logging.getLogger(__name__) active_logger.setLevel(logging.INFO) ProcessorKey = Literal['process_in', 'process_out', 'processors'] ProcessorMap = Dict[type, Dict[ProcessorKey, List[str]]] Processor = Callable[[Any], Any] db_processors = registrar.Registrar[Processor]() DB_PROCESSORS: ProcessorMap = { psqlbase.BYTEA: { 'process_in': [ 'prc_compress' ], 'process_out': [ 'prc_decompress', ] }, } CACHE_PROCESSORS: ProcessorMap = { psqlbase.BYTEA: { 'process_in': [ 'prc_str_encode', ], 'process_out': [ 'prc_str_decode' ] }, psqlbase.TIMESTAMP: { 'process_in': [ 'prc_dt_from_str', ], 'process_out': [ 'prc_dt_to_str' ] }, psqlbase.INTERVAL: { 'process_in': [ 'prc_td_from_float', ], 'process_out': [ 'prc_td_to_float' ] }, psqljson.JSON: { 'process_in': [ 
'prc_json_decode', ], 'process_out': [ 'prc_json_encode' ] } } @db_processors.register_element def prc_str_to_dt(data): return dateparser.parse(data, settings={'DATE_ORDER': 'DMY'}) @db_processors.register_element def prc_compress(data): return zlib.compress(data) @db_processors.register_element def prc_decompress(data): return zlib.decompress(data) @db_processors.register_element def prc_str_encode(data): return data.encode() @db_processors.register_element def prc_str_decode(data): return data.decode() @db_processors.register_element def prc_td_to_float(data: timedelta): return data.total_seconds() @db_processors.register_element def prc_td_from_float(data): return timedelta(seconds=data) @db_processors.register_element def prc_dt_from_str(data): return datetime.fromisoformat(data) @db_processors.register_element def prc_dt_to_str(data): return data.isoformat() @db_processors.register_element def prc_json_encode(data): return json.dumps(data) @db_processors.register_element def prc_json_decode(data): return json.loads(data) class DBBase: def __init__( self, db_lang=None, db_user=None, db_host=None, db_port=None, db_name=None, db_pwd=None, db_engine=None, col_processors=None, engine_kwargs=None ): self.col_prcs = col_processors or DB_PROCESSORS self.Base = automap_base() engine_kwargs = engine_kwargs or {} engine_str = f'+{db_engine}' if db_engine else '' url = f'{db_lang}{engine_str}://{db_user}:{db_pwd}@{db_host}:{db_port}/{db_name}' engine_kwargs = {'url': url} | engine_kwargs active_logger.info(f'connecting to database with kwargs:\n{engine_kwargs}') self.engine = create_engine(**engine_kwargs) self.Base.prepare(self.engine, reflect=True) self.session = Session(self.engine) self.tables: Dict[str, Table] = self.Base.metadata.tables active_logger.info(f'tables found: {list(self.tables.keys())}') def _validate_tbl(self, tbl_name: str): if tbl_name not in self.tables: raise DBException(f'error inserting row, table "{tbl_name}" not found in tables') if tbl_name not 
in self.Base.classes: raise DBException(f'error inserting row, table "{tbl_name}" not found in base') def _validate_cols(self, tbl_name: str, cols: List[str]): for col in cols: if col not in self.tables[tbl_name].columns: raise DBException(f'column "{col}" not found in table "{tbl_name}"') def _validate_pkeys(self, tbl_nm: str, pkey_flts: Dict): tbl_pkeys = tuple(x.name for x in self.tables[tbl_nm].primary_key) flt_pkeys = tuple(pkey_flts.keys()) if tbl_pkeys != flt_pkeys: raise DBException( f'error writing cache, table primary keys "{tbl_pkeys}" does not match specified "{flt_pkeys}"' ) def apply_basic_filters(self, tbl_nm: str, pkey_flts: Dict) -> Query: return self.session.query(self.tables[tbl_nm]).filter( *[self.tables[tbl_nm].columns[k] == v for k, v in pkey_flts.items()] ) def row_exist(self, tbl_nm: str, pkey_flts: Dict) -> bool: return self.apply_basic_filters(tbl_nm, pkey_flts).count() >= 1 def _value_processors(self, value: Any, tbl_name: str, col: str, prcs: ProcessorMap, prc_type: ProcessorKey) -> Any: col_type = type(self.tables[tbl_name].columns[col].type) prc_nms = prcs.get(col_type, {}).get(prc_type) if prc_nms: if type(prc_nms) is not list: raise DBException(f'processors "{prc_type}" for column "{col}" not list') for i, prc_nm in enumerate(prc_nms): prc_func = db_processors[prc_nm] active_logger.info(f'running processor "{prc_type}" #{i}, "{prc_nm}" on column "{col}"') value_out = prc_func(value) value = value_out return value def _process_columns(self, data: Dict, tbl_name: str, prcs: ProcessorMap, prc_type: ProcessorKey) -> None: self._validate_tbl(tbl_name) self._validate_cols(tbl_name, list(data.keys())) for col in data.keys(): val_in = data[col] if val_in is None: active_logger.warning(f'table "{tbl_name}", col "{col}" value is None, skipping processing') else: val_out = self._value_processors(val_in, tbl_name, col, prcs, prc_type) data[col] = val_out def insert_row(self, tbl_name: str, data: Dict): active_logger.info(f'inserting row of 
information into table "{tbl_name}"') active_logger.info(f'keys passed are:\n' f'{yaml.dump([str(k) for k in data.keys()])}') self._process_columns(data, tbl_name, self.col_prcs, 'process_in') row = self.Base.classes[tbl_name](**data) self.session.add(row) self.session.commit() def read_rows(self, tbl_nm: str, pkey_flts: Dict) -> List[Dict]: active_logger.info(f'reading rows from table "{tbl_nm}" with filter "{pkey_flts}"') self._validate_tbl(tbl_nm) self._validate_pkeys(tbl_nm, pkey_flts) if not self.row_exist(tbl_nm, pkey_flts): raise DBException(f'row in table "{tbl_nm}" with filters "{pkey_flts}" does not exist') sql_rows = self.apply_basic_filters(tbl_nm, pkey_flts).all() rows = [] for row in sql_rows: row_dict = { str(k): v for k, v in dict(row).items() } self._process_columns(row_dict, tbl_nm, self.col_prcs, 'process_out') rows.append(row_dict) return rows def read_row(self, tbl_nm: str, pkey_flts: Dict) -> Dict: rows = self.read_rows(tbl_nm, pkey_flts) if len(rows) != 1: raise DBException(f'expected 1 row from table "{tbl_nm}" with filters "{pkey_flts}", got {len(rows)}') return rows[0] def delete_rows(self, tbl_nm: str, pkey_flts: Dict) -> int: active_logger.info(f'deleting rows from table "{tbl_nm}" with filters: "{pkey_flts}"') q = self.apply_basic_filters(tbl_nm, pkey_flts) ret = q.delete(synchronize_session='fetch') self.session.commit() return ret def order_query(self, query: Query, cols, order_col: str, order_asc: bool): if order_col not in cols: raise DBException(f'cannot order by column "{order_col}", does not exist in CTE') order_func = sqlalchemy.asc if order_asc else sqlalchemy.desc return query.order_by(order_func(cols[order_col])) class DBCache(DBBase): def __init__(self, cache_root, cache_processors=None, **kwargs): super().__init__(**kwargs) self.cache_root = path.abspath(path.expandvars(cache_root)) if not path.isdir(self.cache_root): active_logger.info(f'creating cache root directory at: "{self.cache_root}"') os.makedirs(self.cache_root) 
else: active_logger.info(f'existing cache root directory found at: "{self.cache_root}"') self.cache_prcs = cache_processors or CACHE_PROCESSORS def cache_tbl(self, tbl_nm) -> str: return path.join(self.cache_root, tbl_nm) def cache_dir(self, tbl_nm: str, pkey_flts: Dict) -> str: return path.join(self.cache_tbl(tbl_nm), *pkey_flts.values()) def cache_col(self, tbl_nm: str, pkey_flts: Dict, col: str) -> str: return path.join(self.cache_dir(tbl_nm, pkey_flts), col) def clear_cache(self, tbl_nm: str, pkey_flts: Dict): active_logger.info(f'clearing cache from table "{tbl_nm}" with filters "{pkey_flts}"') p = self.cache_dir(tbl_nm, pkey_flts) if not path.exists(p): active_logger.info(f'path "{p}" does not exist, skipping') else: if not path.isdir(p): raise DBException(f'path "{p}" is not a directory') active_logger.info(f'removing cache dir: "{p}"') os.rmdir(p) def write_to_cache(self, tbl_nm: str, pkey_flts: Dict, data: Dict): self._validate_pkeys(tbl_nm, pkey_flts) self._validate_tbl(tbl_nm) d = self.cache_dir(tbl_nm, pkey_flts) active_logger.info(f'writing cache to path: "{d}"') if path.exists(d): active_logger.info('path already exists, exiting...') return os.makedirs(d, exist_ok=True) self._process_columns(data, tbl_nm, self.cache_prcs, 'process_out') for k in pkey_flts.keys(): data.pop(k, None) for col in data.keys(): if data[col] is None: active_logger.warning(f'column "{col}" value is none, skipping') else: p = self.cache_col(tbl_nm, pkey_flts, col) active_logger.info(f'writing column "{col}" to file: "{p}"') with open(p, 'w') as f: f.write(data[col]) def read_to_cache(self, tbl_nm: str, pkey_flts: Dict): active_logger.info(f'reading table "{tbl_nm}" row to cache with filters "{pkey_flts}"') data = self.read_row(tbl_nm, pkey_flts) self.write_to_cache(tbl_nm, pkey_flts, data) def insert_from_cache(self, tbl_nm, pkey_flts: Dict): active_logger.info(f'insert row to table "{tbl_nm}" from cache with filters "{pkey_flts}"') self._validate_pkeys(tbl_nm, pkey_flts) 
self._validate_tbl(tbl_nm) d = self.cache_dir(tbl_nm, pkey_flts) active_logger.info(f'getting files from cache directory: "{d}"') if not path.isdir(d): raise DBException(f'expected to be directory: "{d}"') data = pkey_flts.copy() _, _, files = next(os.walk(d)) self._validate_cols(tbl_nm, files) for fnm in files: fp = self.cache_col(tbl_nm, pkey_flts, fnm) active_logger.info(f'reading column data from file: "{fp}"') with open(fp, 'r') as f: data[fnm] = f.read() self._process_columns(data, tbl_nm, self.cache_prcs, 'process_in') self.insert_row(tbl_nm, data) def _cache_pkeys(self, tbl_nm: str): pkey_names = tuple(x.name for x in self.tables[tbl_nm].primary_key) def _get_pkeys(_dir: str, _base_pkey: Dict, _lvl) -> List: if not path.isdir(_dir): return [] _, dirnames, _ = next(os.walk(_dir)) return [_base_pkey | {pkey_names[_lvl]: d} for d in dirnames] lvl = 0 flts = [{}] while lvl < len(pkey_names): flts_out = [] for f in flts: d = self.cache_dir(tbl_nm, f) flts_out += _get_pkeys(d, f, lvl) flts = flts_out lvl += 1 return flts def scan_cache(self, tbl_nm: str, post_insert: Optional[Callable[[str, Dict], None]] = None) -> List[Dict]: tbl_root = self.cache_tbl(tbl_nm) active_logger.info(f'scanning for cached rows for table "{tbl_nm}" to insert in "{tbl_root}"') flts = self._cache_pkeys(tbl_nm) added_pkeys = [] for pkey_filters in flts: if self.row_exist(tbl_nm, pkey_filters): active_logger.info(f'row "{pkey_filters}" already exists in database, skipping...') else: self.insert_from_cache(tbl_nm, pkey_filters) added_pkeys.append(pkey_filters) if post_insert is not None: post_insert(tbl_nm, pkey_filters) return added_pkeys def wipe_cache(self) -> Tuple[int, int]: active_logger.info(f'clearing cache root at "{self.cache_root}"') _, dirnames, filenames = next(os.walk(self.cache_root)) for fnm in filenames: p = path.join(self.cache_root, fnm) os.remove(p) for dnm in dirnames: p = path.join(self.cache_root, dnm) shutil.rmtree(p) return len(filenames), len(dirnames) class 
QueryFilter(TypedDict): value: object field: str op: str def apply_filter_spec(tbl: Table, q: Query, filters_spec: List[QueryFilter]) -> Query: conditions = [ SqlOperator.OPERATORS[f['op']](tbl.columns[f['field']], f['value']) for f in filters_spec ] return q.filter(*conditions) class BettingDB: def __init__(self, **kwargs): self._dbc = DBCache(**kwargs) def read(self, tbl_nm: str, pkey_flts: Dict): return self._dbc.read_row(tbl_nm, pkey_flts) def close(self): self._dbc.session.close() def meta_serialise(self, market_info: Dict) -> None: self._dbc._process_columns(market_info, 'marketmeta', self._dbc.cache_prcs, 'process_out') def meta_de_serialise(self, market_info: Dict) -> None: self._dbc._process_columns(market_info, 'marketmeta', self._dbc.cache_prcs, 'process_in') @staticmethod def get_meta(first_book: MarketBook, cat: MarketCatalogue = None) -> Dict: mktdef: MarketDefinition = first_book.market_definition mktid = first_book.market_id init_time = first_book.publish_time pre_off = mktdef.market_time - init_time metadata = { 'market_id': mktid, 'sport_id': mktdef.event_type_id, 'market_time': mktdef.market_time, 'market_type': mktdef.market_type, 'betting_type': mktdef.betting_type, 'country_code': mktdef.country_code, 'event_id': mktdef.event_id, 'event_name': mktdef.event_name, 'timezone': mktdef.timezone, 'venue': mktdef.venue, 'init_time': init_time, 'pre_off': pre_off, 'format': 'historic', } if cat is not None: metadata['event_name'] = cat.event.name metadata['venue'] = cat.event.venue metadata['format'] = 'recorded' return metadata @staticmethod def get_first_book(file_path: str) -> Optional[MarketBook]: with open(file_path) as f: l = f.readline() q = Queue() listener = StreamListener(q, max_latency=sys.float_info.max) listener.register_stream(0, 'marketSubscription') listener.on_data(l) return listener.output_queue.get()[0] def insert_market_meta(self, market_id: str): active_logger.info(f'creating metadata database entry for market "{market_id}"') 
pkey_flts = {'market_id': market_id} self._dbc.read_to_cache('marketstream', pkey_flts) stream_path = self._dbc.cache_col('marketstream', pkey_flts, 'stream_updates') bk = self.get_first_book(stream_path) cat = None cat_path = self._dbc.cache_col('marketstream', pkey_flts, 'catalogue') if path.exists(cat_path): if path.getsize(cat_path): with open(cat_path, 'r') as f: cat_dict = json.loads(f.read()) try: cat = MarketCatalogue(**cat_dict) except TypeError as e: raise DBException(f'failed to create market catalogue: {e}') if cat is None: names = {r.selection_id: r.name for r in bk.market_definition.runners} else: names = {r.selection_id: r.runner_name for r in cat.runners} for runner_id, name in names.items(): active_logger.info(f'creating row for market "{market_id}", runner "{runner_id}", name "{name}"') self._dbc.insert_row('marketrunners', { 'market_id': market_id, 'runner_id': runner_id, 'runner_name': name }) meta_data = self.get_meta(bk, cat) self._dbc.insert_row('marketmeta', meta_data) def insert_strategy_runners(self, pkey_filters, profit_func: Callable[[str], Dict]): p = self._dbc.cache_col('strategyupdates', pkey_filters, 'strategy_updates') if not path.isfile(p): raise DBException(f'expected strategy update file at "{p}"') runner_profits = profit_func(p) for k, v in runner_profits.items(): self._dbc.insert_row('strategyrunners', pkey_filters | { 'runner_id': k, 'profit': v }) def wipe_cache(self) -> Tuple[int, int]: return self._dbc.wipe_cache() def scan_mkt_cache(self) -> List[Dict]: def mkt_post_insert(tbl_name, pkey_flts): if tbl_name != 'marketstream': raise DBException(f'expected "marketstream" table') self.insert_market_meta(pkey_flts['market_id']) return self._dbc.scan_cache('marketstream', mkt_post_insert) def scan_strat_cache(self, profit_func: Callable[[str], Dict]) -> List[Dict]: def strat_post_insert(tbl_nm, pkey_flts): self.insert_strategy_runners(pkey_flts, profit_func) added_keys = self._dbc.scan_cache('strategymeta') 
self._dbc.scan_cache('strategyupdates', strat_post_insert) return added_keys def write_strat_info(self, strategy_id, type: str, name: str, exec_time: datetime, info: dict): data = { 'type': type, 'name': name, 'exec_time': exec_time, 'info': info } self._dbc.write_to_cache( tbl_nm='strategymeta', pkey_flts={ 'strategy_id': str(strategy_id) }, data=data ) def path_mkt_usr_updates(self, market_id) -> str: return self._dbc.cache_col( tbl_nm='marketstream', pkey_flts={ 'market_id': market_id }, col='user_data' ) def path_mkt_cat(self, market_id) -> str: return self._dbc.cache_col( tbl_nm='marketstream', pkey_flts={ 'market_id': market_id }, col='catalogue', ) def path_mkt_updates(self, market_id) -> str: return self._dbc.cache_col( tbl_nm='marketstream', pkey_flts={ 'market_id': market_id }, col='stream_updates', ) def path_strat_features(self, market_id, strategy_id) -> str: return self._dbc.cache_col( tbl_nm='strategyupdates', pkey_flts={ 'strategy_id': str(strategy_id), 'market_id': market_id, }, col='strategy_features' ) def path_strat_updates(self, market_id, strategy_id) -> str: return self._dbc.cache_col( tbl_nm='strategyupdates', pkey_flts={ 'strategy_id': str(strategy_id), 'market_id': market_id }, col='strategy_updates' ) def paths_market_updates(self, filter_spec: List[QueryFilter], limit=200): tbl = self._dbc.tables['marketmeta'] q = self._dbc.session.query(tbl) q_flt = apply_filter_spec(tbl, q, filter_spec) rows = q_flt.limit(limit).all() update_paths = [] for row in rows: mkt_flt = {'market_id': row.market_id} self._dbc.read_to_cache('marketstream', mkt_flt) p = self._dbc.cache_col('marketstream', mkt_flt, 'stream_updates') if not path.isfile(p): raise DBException(f'expected file at stream update path: "{p}"') update_paths.append(p) return update_paths def rows_runners(self, market_id, strategy_id) -> List[Dict]: sr = self._dbc.tables['strategyrunners'] cte_strat = self._dbc.session.query( sr.columns['runner_id'], 
sr.columns['profit'].label('runner_profit') ).filter( sr.columns['strategy_id'] == strategy_id, sr.columns['market_id'] == market_id ).cte() rn = self._dbc.tables['marketrunners'] rows = self._dbc.session.query( rn, cte_strat.c['runner_profit'], ).join( cte_strat, rn.columns['runner_id'] == cte_strat.c['runner_id'], isouter=True, ).filter( rn.columns['market_id'] == market_id ).all() return [dict(row) for row in rows] def rows_market(self, cte, col_names, max_rows, order_col=None, order_asc=False) -> List[Dict]: cols = [cte.c[nm] for nm in col_names] q = self._dbc.session.query(*cols) if order_col is not None: q = self._dbc.order_query(q, cte.c, order_col, order_asc) rows = q.limit(max_rows).all() return [dict(row) for row in rows] def rows_strategy(self, max_rows) -> List[Dict]: shn = self._dbc.session sm = self._dbc.tables['strategymeta'] sr = self._dbc.tables['strategyrunners'] p_cte = shn.query( sr.columns['strategy_id'], func.sum(sr.columns['profit']).label('total_profit') ).group_by(sr.columns['strategy_id']).cte() m_cte = shn.query(sr.c['strategy_id'], sr.c['market_id']).distinct().cte() m_cte = shn.query( m_cte.c['strategy_id'], func.count(m_cte.c['market_id']).label('n_markets') ).group_by(m_cte.c['strategy_id']).cte() q = shn.query(sm, p_cte.c['total_profit'], m_cte.c['n_markets']).join( p_cte, sm.c['strategy_id'] == p_cte.c['strategy_id'], isouter=True ).join( m_cte, sm.c['strategy_id'] == m_cte.c['strategy_id'], isouter=True ) return [dict(row) for row in q.limit(max_rows).all()] def filters_labels(self, filters: DBFilterHandler, cte) -> List[List[Dict[str, Any]]]: return filters.filters_labels(self._dbc.session, self._dbc.tables, cte) def cte_count(self, cte: CTE) -> int: return self._dbc.session.query(cte).count() def strategy_count(self) -> int: return self._dbc.session.query(self._dbc.tables['strategymeta']).count() def strategy_delete(self, strategy_id) -> Tuple[int, int ,int]: strategy_id = str(strategy_id) active_logger.info(f'attempting to 
delete strategy: "{strategy_id}"') pkey_flt = {'strategy_id': strategy_id} if not self._dbc.row_exist('strategymeta', pkey_flt): raise DBException(f'strategy does not exist, using filters: "{pkey_flt}"') if not strategy_id: raise DBException(f'trying to delete strategy where ID passed is blank!') rows = self._dbc.read_rows('strategymeta', pkey_flt) if len(rows) != 1: raise DBException(f'expected 1 strategy meta row with filter: "{pkey_flt}"') n_runners = self._dbc.delete_rows('strategyrunners', pkey_flt) active_logger.info(f'deleted {n_runners} rows from "strategyrunners" table') n_mkts = self._dbc.delete_rows('strategyupdates', pkey_flt) active_logger.info(f'deleted {n_mkts} rows from "strategyupdates" table') n_meta = self._dbc.delete_rows('strategymeta', pkey_flt) active_logger.info(f'deleted {n_meta} rows from "strategymeta" table') return n_meta, n_mkts, n_runners def filters_strat_cte(self, strat_filters: DBFilterHandler) -> CTE: strat_meta = self._dbc.tables['strategymeta'] q = self._dbc.session.query(strat_meta).filter( *strat_filters.filters_conditions(strat_meta) ) return q.cte() def filters_mkt_cte(self, strategy_id, column_filters: List[ColumnElement]) -> CTE: meta = self._dbc.tables['marketmeta'] sr = self._dbc.tables['strategyrunners'] if strategy_id: strat_cte = self._dbc.session.query( sr.columns['market_id'], sql_sum(sr.columns['profit']).label('market_profit') ).filter( sr.columns['strategy_id'] == strategy_id ).group_by( sr.columns['market_id'] ).cte() q = self._dbc.session.query( meta, strat_cte.c['market_profit'] ).join( strat_cte, meta.columns['market_id'] == strat_cte.c['market_id'] ) else: q = self._dbc.session.query( meta, sqlalchemy.null().label('market_profit') ) q = q.filter(*column_filters) return q.cte() def cache_strat_updates(self, strategy_id, market_id): pkey_flts = { 'strategy_id': str(strategy_id), 'market_id': market_id } self._dbc.read_to_cache('strategyupdates', pkey_flts) def cache_strat_meta(self, strategy_id): pkey_flt = 
{'strategy_id': strategy_id} self._dbc.read_to_cache('strategymeta', pkey_flt) def cache_mkt_stream(self, market_id): pkey_flt = {'market_id': market_id} self._dbc.read_to_cache('marketstream', pkey_flt) def read_mkt_meta(self, market_id) -> Dict: pkey_flt = {'market_id': market_id} return self._dbc.read_row('marketmeta', pkey_flt) def _lost_ids(self, t1: Table, t2, id_col: str): cte = self._dbc.session.query( t2.columns[id_col] ).group_by(t2.columns[id_col]).cte() return self._dbc.session.query( t1.columns[id_col], cte.c[id_col] ).join( cte, t1.columns[id_col] == cte.c[id_col], isouter=True ).filter(cte.c[id_col] == None) def health_check(self): mkt_stm = self._dbc.tables['marketstream'] mkt_met = self._dbc.tables['marketmeta'] mkt_run = self._dbc.tables['marketrunners'] n_mkt = self._dbc.session.query(mkt_stm).count() active_logger.info(f'{n_mkt} market stream rows') n_met = self._dbc.session.query(mkt_met).count() active_logger.info(f'{n_met} market meta rows') q = self._lost_ids(mkt_stm, mkt_met, 'market_id') for row in q.all(): active_logger.error(f'market "{row[0]}" does not have a meta row') nrun = self._dbc.session.query(mkt_run).count() active_logger.info(f'{nrun} market runner rows') q = self._lost_ids(mkt_stm, mkt_run, 'market_id') for row in q.all(): active_logger.error(f'market "{row[0]}" does not have any runner rows') srt_met = self._dbc.tables['strategymeta'] srt_run = self._dbc.tables['strategyrunners'] srt_udt = self._dbc.tables['strategyupdates'] n_srtmet = self._dbc.session.query(srt_met).count() active_logger.info(f'{n_srtmet} strategy meta rows found') n_srtudt = self._dbc.session.query(srt_udt).count() active_logger.info(f'{n_srtudt} strategy market update rows found') q = self._lost_ids(srt_met, srt_udt, 'strategy_id') for row in q.all(): active_logger.error(f'strategy "{row[0]}" does not have any market updates') n_srtrun = self._dbc.session.query(srt_run).count() active_logger.info(f'{n_srtrun} strategy runner rows found') q = 
self._lost_ids(srt_met, srt_run, 'strategy_id') for row in q.all(): active_logger.error(f'strategy "{row[0]}" does not have any runner rows')
true
true
f70419112d1be61668bdf2e4076d94273508b66b
838
py
Python
rest_vk_api/urls.py
vadimk2016/rest_vk_api
a21a30469b29208bf7f3386d07af6ff6ed14aae0
[ "MIT" ]
null
null
null
rest_vk_api/urls.py
vadimk2016/rest_vk_api
a21a30469b29208bf7f3386d07af6ff6ed14aae0
[ "MIT" ]
2
2020-06-05T18:09:34.000Z
2021-03-19T21:59:24.000Z
rest_vk_api/urls.py
vadimk2016/rest-vk-api
a21a30469b29208bf7f3386d07af6ff6ed14aae0
[ "MIT" ]
null
null
null
"""rest_vk_api URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.conf.urls import url from main import views urlpatterns = [ url(r'^users/(?P<user_ids>[0-9]+).*', views.get_user, name='get_users'), url(r'^status$', views.status, name='status'), ]
36.434783
77
0.696897
from django.conf.urls import url from main import views urlpatterns = [ url(r'^users/(?P<user_ids>[0-9]+).*', views.get_user, name='get_users'), url(r'^status$', views.status, name='status'), ]
true
true
f7041992e58e0432873e5f2a1e78ad8538963b77
262
py
Python
tests/test_portscan_ssh.py
lynxis/testWrt
072ba9236f6a392d924d838454beb60504b3e554
[ "BSD-3-Clause" ]
2
2019-05-24T23:27:16.000Z
2019-05-25T08:10:31.000Z
tests/test_portscan_ssh.py
lynxis/testWrt
072ba9236f6a392d924d838454beb60504b3e554
[ "BSD-3-Clause" ]
1
2022-03-29T21:52:54.000Z
2022-03-29T21:52:54.000Z
tests/test_portscan_ssh.py
lynxis/testWrt
072ba9236f6a392d924d838454beb60504b3e554
[ "BSD-3-Clause" ]
1
2016-05-15T03:36:55.000Z
2016-05-15T03:36:55.000Z
#!/usr/bin/env python from testWrt import testsetup from testWrt.lib import SSHOpenWrt if __name__ == "__main__": ts = testsetup.create_generic() device = SSHOpenWrt(hostname="192.168.1.1", password="test") ret = device.portscan(22) print(ret)
23.818182
64
0.706107
from testWrt import testsetup from testWrt.lib import SSHOpenWrt if __name__ == "__main__": ts = testsetup.create_generic() device = SSHOpenWrt(hostname="192.168.1.1", password="test") ret = device.portscan(22) print(ret)
true
true
f7041a0db9cf7f2e654b1f17074c45eb5fb92436
4,833
py
Python
opennem/pipelines/wem/facility_scada.py
paulculmsee/opennem
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
[ "MIT" ]
22
2020-06-30T05:27:21.000Z
2022-02-21T12:13:51.000Z
opennem/pipelines/wem/facility_scada.py
paulculmsee/opennem
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
[ "MIT" ]
71
2020-08-07T13:06:30.000Z
2022-03-15T06:44:49.000Z
opennem/pipelines/wem/facility_scada.py
paulculmsee/opennem
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
[ "MIT" ]
13
2020-06-30T03:28:32.000Z
2021-12-30T08:17:16.000Z
import csv import logging from datetime import datetime, timedelta from typing import Any, Dict, Optional from scrapy import Spider from sqlalchemy.dialects.postgresql import insert from opennem.core.normalizers import normalize_duid from opennem.db import SessionLocal, get_database_engine from opennem.db.models.opennem import FacilityScada from opennem.pipelines.nem.opennem import unit_scada_generate_facility_scada from opennem.schema.network import NetworkWEM from opennem.utils.dates import parse_date from opennem.utils.pipelines import check_spider_pipeline logger = logging.getLogger(__name__) class WemStoreFacilityScada(object): @check_spider_pipeline def process_item( self, item: Dict[str, Any], spider: Optional[Spider] = None ) -> Dict[str, Any]: if "content" not in item: logger.error("No item content slipping store facility scada") return item csvreader = csv.DictReader(item["content"].split("\n")) item["table_schema"] = FacilityScada item["update_fields"] = ["generated", "eoi_quantity"] item["records"] = unit_scada_generate_facility_scada( csvreader, spider, interval_field="Trading Interval", facility_code_field="Facility Code", power_field="EOI Quantity (MW)", energy_field="Energy Generated (MWh)", network=NetworkWEM, ) item["content"] = None return item class WemStoreFacilityIntervals(object): @check_spider_pipeline def process_item( self, item: Dict[str, Any], spider: Optional[Spider] = None ) -> Dict[str, Any]: if "content" not in item: logger.error("No item content slipping store facility scada") return item csvreader = csv.DictReader(item["content"].split("\n")) item["table_schema"] = FacilityScada item["update_fields"] = ["generated"] item["records"] = unit_scada_generate_facility_scada( csvreader, spider, interval_field="PERIOD", facility_code_field="FACILITY_CODE", power_field="ACTUAL_MW", network=NetworkWEM, ) item["content"] = None return item class WemStoreLiveFacilityScada(object): """ Store live facility scada data. 
@NOTE no longer used """ @check_spider_pipeline def process_item(self, item: Dict[str, Any], spider: Optional[Spider] = None) -> int: session = SessionLocal() engine = get_database_engine() csvreader = csv.DictReader(item["content"].split("\n")) records_to_store = [] last_asat = None for row in csvreader: # @TODO MAX_GEN_CAPACITY # facility_capacity = row["MAX_GEN_CAPACITY"] if row["AS_AT"] != "": last_asat = parse_date(row["AS_AT"], network=NetworkWEM, dayfirst=False) if not last_asat or type(last_asat) is not datetime: logger.error("Invalid row or no datetime") continue # We need to pivot the table since columns are time intervals for i in range(1, 48): column = f"I{i:02}" if column not in row: logger.error("Do not have data for interval {}".format(column)) continue if i > 0: interval = last_asat - timedelta(minutes=(i - 1) * 30) else: interval = last_asat facility_code = normalize_duid(row["FACILITY_CODE"]) val = None try: val = float(row[column]) / 2 or None except ValueError: pass records_to_store.append( { "created_by": spider.name, "network_id": "WEM", "trading_interval": interval, "facility_code": facility_code, "eoi_quantity": val, } ) stmt = insert(FacilityScada).values(records_to_store) stmt.bind = engine stmt = stmt.on_conflict_do_update( index_elements=["trading_interval", "network_id", "facility_code", "is_forecast"], set_={ # "updated_by": stmt.excluded.created_by, "eoi_quantity": stmt.excluded.eoi_quantity, }, ) try: session.execute(stmt) session.commit() except Exception as e: logger.error("Error inserting records") logger.error(e) finally: session.close() return len(records_to_store)
30.783439
94
0.578523
import csv import logging from datetime import datetime, timedelta from typing import Any, Dict, Optional from scrapy import Spider from sqlalchemy.dialects.postgresql import insert from opennem.core.normalizers import normalize_duid from opennem.db import SessionLocal, get_database_engine from opennem.db.models.opennem import FacilityScada from opennem.pipelines.nem.opennem import unit_scada_generate_facility_scada from opennem.schema.network import NetworkWEM from opennem.utils.dates import parse_date from opennem.utils.pipelines import check_spider_pipeline logger = logging.getLogger(__name__) class WemStoreFacilityScada(object): @check_spider_pipeline def process_item( self, item: Dict[str, Any], spider: Optional[Spider] = None ) -> Dict[str, Any]: if "content" not in item: logger.error("No item content slipping store facility scada") return item csvreader = csv.DictReader(item["content"].split("\n")) item["table_schema"] = FacilityScada item["update_fields"] = ["generated", "eoi_quantity"] item["records"] = unit_scada_generate_facility_scada( csvreader, spider, interval_field="Trading Interval", facility_code_field="Facility Code", power_field="EOI Quantity (MW)", energy_field="Energy Generated (MWh)", network=NetworkWEM, ) item["content"] = None return item class WemStoreFacilityIntervals(object): @check_spider_pipeline def process_item( self, item: Dict[str, Any], spider: Optional[Spider] = None ) -> Dict[str, Any]: if "content" not in item: logger.error("No item content slipping store facility scada") return item csvreader = csv.DictReader(item["content"].split("\n")) item["table_schema"] = FacilityScada item["update_fields"] = ["generated"] item["records"] = unit_scada_generate_facility_scada( csvreader, spider, interval_field="PERIOD", facility_code_field="FACILITY_CODE", power_field="ACTUAL_MW", network=NetworkWEM, ) item["content"] = None return item class WemStoreLiveFacilityScada(object): @check_spider_pipeline def process_item(self, item: Dict[str, 
Any], spider: Optional[Spider] = None) -> int: session = SessionLocal() engine = get_database_engine() csvreader = csv.DictReader(item["content"].split("\n")) records_to_store = [] last_asat = None for row in csvreader: if row["AS_AT"] != "": last_asat = parse_date(row["AS_AT"], network=NetworkWEM, dayfirst=False) if not last_asat or type(last_asat) is not datetime: logger.error("Invalid row or no datetime") continue for i in range(1, 48): column = f"I{i:02}" if column not in row: logger.error("Do not have data for interval {}".format(column)) continue if i > 0: interval = last_asat - timedelta(minutes=(i - 1) * 30) else: interval = last_asat facility_code = normalize_duid(row["FACILITY_CODE"]) val = None try: val = float(row[column]) / 2 or None except ValueError: pass records_to_store.append( { "created_by": spider.name, "network_id": "WEM", "trading_interval": interval, "facility_code": facility_code, "eoi_quantity": val, } ) stmt = insert(FacilityScada).values(records_to_store) stmt.bind = engine stmt = stmt.on_conflict_do_update( index_elements=["trading_interval", "network_id", "facility_code", "is_forecast"], set_={ "eoi_quantity": stmt.excluded.eoi_quantity, }, ) try: session.execute(stmt) session.commit() except Exception as e: logger.error("Error inserting records") logger.error(e) finally: session.close() return len(records_to_store)
true
true
f7041a14cedc1d07490ad87b40b99d090c5fa1b2
8,677
py
Python
indico/modules/attachments/forms.py
UNOG-Indico/UNOG-Indico-v2
4fa4393cc1f3b453a69f5e0ea3b52c18337831a5
[ "MIT" ]
null
null
null
indico/modules/attachments/forms.py
UNOG-Indico/UNOG-Indico-v2
4fa4393cc1f3b453a69f5e0ea3b52c18337831a5
[ "MIT" ]
null
null
null
indico/modules/attachments/forms.py
UNOG-Indico/UNOG-Indico-v2
4fa4393cc1f3b453a69f5e0ea3b52c18337831a5
[ "MIT" ]
null
null
null
# This file is part of Indico. # Copyright (C) 2002 - 2021 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from __future__ import unicode_literals from wtforms.ext.sqlalchemy.fields import QuerySelectField from wtforms.fields import BooleanField, TextAreaField from wtforms.fields.html5 import URLField from wtforms.fields.simple import HiddenField, StringField from wtforms.validators import DataRequired, Optional, ValidationError from indico.core.db import db from indico.core.db.sqlalchemy.protection import ProtectionMode from indico.modules.attachments.models.folders import AttachmentFolder from indico.modules.attachments.util import get_default_folder_names from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.forms.base import IndicoForm, generated_data from indico.web.forms.fields import (AccessControlListField, EditableFileField, FileField, IndicoDateField, IndicoRadioField, IndicoSelectMultipleCheckboxField) from indico.web.forms.validators import HiddenUnless, UsedIf from indico.web.forms.widgets import SwitchWidget, TypeaheadWidget class AttachmentFormBase(IndicoForm): protected = BooleanField(_("Protected"), widget=SwitchWidget()) folder = QuerySelectField(_("Folder"), allow_blank=True, blank_text=_("No folder selected"), get_label='title', description=_("Adding materials to folders allow grouping and easier permission " "management.")) acl = AccessControlListField(_("Access control list"), [UsedIf(lambda form, field: form.protected.data)], allow_groups=True, allow_external_users=True, allow_event_roles=True, allow_category_roles=True, allow_registration_forms=True, event=lambda form: form.event, default_text=_('Restrict access to this material'), description=_("The list of users and groups allowed to access the material")) def __init__(self, *args, **kwargs): linked_object = kwargs.pop('linked_object') self.event = 
getattr(linked_object, 'event', None) # not present in categories super(AttachmentFormBase, self).__init__(*args, **kwargs) self.folder.query = (AttachmentFolder .find(object=linked_object, is_default=False, is_deleted=False) .order_by(db.func.lower(AttachmentFolder.title))) @generated_data def protection_mode(self): return ProtectionMode.protected if self.protected.data else ProtectionMode.inheriting class EditAttachmentFormBase(AttachmentFormBase): title = StringField(_("Title"), [DataRequired()]) description = TextAreaField(_("Description")) class AddAttachmentFilesForm(AttachmentFormBase): files = FileField(_("Files"), multiple_files=True) def _get_file_data(attachment): file = attachment.file return { 'url': url_for('attachments.download', attachment, filename=file.filename, from_preview='1'), 'filename': file.filename, 'size': file.size, 'content_type': file.content_type } class EditAttachmentFileForm(EditAttachmentFormBase): file = EditableFileField(_("File"), add_remove_links=False, get_metadata=_get_file_data, description=_("Already uploaded file. 
Replace it by adding a new file.")) class AttachmentLinkFormMixin(object): title = StringField(_("Title"), [DataRequired()]) link_url = URLField(_("URL"), [DataRequired()]) class AddAttachmentLinkForm(AttachmentLinkFormMixin, AttachmentFormBase): pass class EditAttachmentLinkForm(AttachmentLinkFormMixin, EditAttachmentFormBase): pass class AttachmentFolderForm(IndicoForm): title = HiddenField(_("Name"), [DataRequired()], widget=TypeaheadWidget(), description=_("The name of the folder.")) description = TextAreaField(_("Description"), description=_("Description of the folder and its content")) protected = BooleanField(_("Protected"), widget=SwitchWidget()) acl = AccessControlListField(_("Access control list"), [UsedIf(lambda form, field: form.protected.data)], allow_groups=True, allow_external_users=True, allow_event_roles=True, allow_category_roles=True, allow_registration_forms=True, event=lambda form: form.event, default_text=_('Restrict access to this folder'), description=_("The list of users and groups allowed to access the folder")) is_always_visible = BooleanField(_("Always Visible"), [HiddenUnless('is_hidden', value=False)], widget=SwitchWidget(), description=_("By default, folders are always visible, even if a user cannot " "access them. You can disable this behavior here, hiding the folder " "for anyone who does not have permission to access it.")) is_hidden = BooleanField(_("Always hidden"), [HiddenUnless('is_always_visible', value=False)], widget=SwitchWidget(), description=_("Always hide the folder and its contents from public display areas of " "the event. You can use this for folders to store non-image files used " "e.g. in download links. 
The access permissions still apply.")) def __init__(self, *args, **kwargs): self.linked_object = kwargs.pop('linked_object') self.event = getattr(self.linked_object, 'event', None) # not present in categories super(AttachmentFolderForm, self).__init__(*args, **kwargs) self.title.choices = self._get_title_suggestions() def _get_title_suggestions(self): query = db.session.query(AttachmentFolder.title).filter_by(is_deleted=False, is_default=False, object=self.linked_object) existing = set(x[0] for x in query) suggestions = set(get_default_folder_names()) - existing if self.title.data: suggestions.add(self.title.data) return sorted(suggestions) def validate_is_always_visible(self, field): if self.is_always_visible.data and self.is_hidden.data: raise ValidationError('These two options cannot be used at the same time') validate_is_hidden = validate_is_always_visible @generated_data def protection_mode(self): return ProtectionMode.protected if self.protected.data else ProtectionMode.inheriting class AttachmentPackageForm(IndicoForm): added_since = IndicoDateField(_('Added Since'), [Optional()], description=_('Include only attachments uploaded after this date')) filter_type = IndicoRadioField(_('Include'), [DataRequired()]) sessions = IndicoSelectMultipleCheckboxField(_('Sessions'), [UsedIf(lambda form, _: form.filter_type.data == 'sessions'), DataRequired()], description=_('Include materials from selected sessions'), coerce=int) contributions = IndicoSelectMultipleCheckboxField(_('Contributions'), [UsedIf(lambda form, _: form.filter_type.data == 'contributions'), DataRequired()], description=_('Include materials from selected contributions'), coerce=int) dates = IndicoSelectMultipleCheckboxField(_('Events scheduled on'), [UsedIf(lambda form, _: form.filter_type.data == 'dates'), DataRequired()], description=_('Include materials from sessions/contributions scheduled ' 'on the selected dates'))
53.561728
120
0.618071
from __future__ import unicode_literals from wtforms.ext.sqlalchemy.fields import QuerySelectField from wtforms.fields import BooleanField, TextAreaField from wtforms.fields.html5 import URLField from wtforms.fields.simple import HiddenField, StringField from wtforms.validators import DataRequired, Optional, ValidationError from indico.core.db import db from indico.core.db.sqlalchemy.protection import ProtectionMode from indico.modules.attachments.models.folders import AttachmentFolder from indico.modules.attachments.util import get_default_folder_names from indico.util.i18n import _ from indico.web.flask.util import url_for from indico.web.forms.base import IndicoForm, generated_data from indico.web.forms.fields import (AccessControlListField, EditableFileField, FileField, IndicoDateField, IndicoRadioField, IndicoSelectMultipleCheckboxField) from indico.web.forms.validators import HiddenUnless, UsedIf from indico.web.forms.widgets import SwitchWidget, TypeaheadWidget class AttachmentFormBase(IndicoForm): protected = BooleanField(_("Protected"), widget=SwitchWidget()) folder = QuerySelectField(_("Folder"), allow_blank=True, blank_text=_("No folder selected"), get_label='title', description=_("Adding materials to folders allow grouping and easier permission " "management.")) acl = AccessControlListField(_("Access control list"), [UsedIf(lambda form, field: form.protected.data)], allow_groups=True, allow_external_users=True, allow_event_roles=True, allow_category_roles=True, allow_registration_forms=True, event=lambda form: form.event, default_text=_('Restrict access to this material'), description=_("The list of users and groups allowed to access the material")) def __init__(self, *args, **kwargs): linked_object = kwargs.pop('linked_object') self.event = getattr(linked_object, 'event', None) super(AttachmentFormBase, self).__init__(*args, **kwargs) self.folder.query = (AttachmentFolder .find(object=linked_object, is_default=False, is_deleted=False) 
.order_by(db.func.lower(AttachmentFolder.title))) @generated_data def protection_mode(self): return ProtectionMode.protected if self.protected.data else ProtectionMode.inheriting class EditAttachmentFormBase(AttachmentFormBase): title = StringField(_("Title"), [DataRequired()]) description = TextAreaField(_("Description")) class AddAttachmentFilesForm(AttachmentFormBase): files = FileField(_("Files"), multiple_files=True) def _get_file_data(attachment): file = attachment.file return { 'url': url_for('attachments.download', attachment, filename=file.filename, from_preview='1'), 'filename': file.filename, 'size': file.size, 'content_type': file.content_type } class EditAttachmentFileForm(EditAttachmentFormBase): file = EditableFileField(_("File"), add_remove_links=False, get_metadata=_get_file_data, description=_("Already uploaded file. Replace it by adding a new file.")) class AttachmentLinkFormMixin(object): title = StringField(_("Title"), [DataRequired()]) link_url = URLField(_("URL"), [DataRequired()]) class AddAttachmentLinkForm(AttachmentLinkFormMixin, AttachmentFormBase): pass class EditAttachmentLinkForm(AttachmentLinkFormMixin, EditAttachmentFormBase): pass class AttachmentFolderForm(IndicoForm): title = HiddenField(_("Name"), [DataRequired()], widget=TypeaheadWidget(), description=_("The name of the folder.")) description = TextAreaField(_("Description"), description=_("Description of the folder and its content")) protected = BooleanField(_("Protected"), widget=SwitchWidget()) acl = AccessControlListField(_("Access control list"), [UsedIf(lambda form, field: form.protected.data)], allow_groups=True, allow_external_users=True, allow_event_roles=True, allow_category_roles=True, allow_registration_forms=True, event=lambda form: form.event, default_text=_('Restrict access to this folder'), description=_("The list of users and groups allowed to access the folder")) is_always_visible = BooleanField(_("Always Visible"), [HiddenUnless('is_hidden', value=False)], 
widget=SwitchWidget(), description=_("By default, folders are always visible, even if a user cannot " "access them. You can disable this behavior here, hiding the folder " "for anyone who does not have permission to access it.")) is_hidden = BooleanField(_("Always hidden"), [HiddenUnless('is_always_visible', value=False)], widget=SwitchWidget(), description=_("Always hide the folder and its contents from public display areas of " "the event. You can use this for folders to store non-image files used " "e.g. in download links. The access permissions still apply.")) def __init__(self, *args, **kwargs): self.linked_object = kwargs.pop('linked_object') self.event = getattr(self.linked_object, 'event', None) super(AttachmentFolderForm, self).__init__(*args, **kwargs) self.title.choices = self._get_title_suggestions() def _get_title_suggestions(self): query = db.session.query(AttachmentFolder.title).filter_by(is_deleted=False, is_default=False, object=self.linked_object) existing = set(x[0] for x in query) suggestions = set(get_default_folder_names()) - existing if self.title.data: suggestions.add(self.title.data) return sorted(suggestions) def validate_is_always_visible(self, field): if self.is_always_visible.data and self.is_hidden.data: raise ValidationError('These two options cannot be used at the same time') validate_is_hidden = validate_is_always_visible @generated_data def protection_mode(self): return ProtectionMode.protected if self.protected.data else ProtectionMode.inheriting class AttachmentPackageForm(IndicoForm): added_since = IndicoDateField(_('Added Since'), [Optional()], description=_('Include only attachments uploaded after this date')) filter_type = IndicoRadioField(_('Include'), [DataRequired()]) sessions = IndicoSelectMultipleCheckboxField(_('Sessions'), [UsedIf(lambda form, _: form.filter_type.data == 'sessions'), DataRequired()], description=_('Include materials from selected sessions'), coerce=int) contributions = 
IndicoSelectMultipleCheckboxField(_('Contributions'), [UsedIf(lambda form, _: form.filter_type.data == 'contributions'), DataRequired()], description=_('Include materials from selected contributions'), coerce=int) dates = IndicoSelectMultipleCheckboxField(_('Events scheduled on'), [UsedIf(lambda form, _: form.filter_type.data == 'dates'), DataRequired()], description=_('Include materials from sessions/contributions scheduled ' 'on the selected dates'))
true
true
f7041a1e02aa41849a5ec7f6bedf4701c0481e97
749
py
Python
RecipeParser_Scraper.py
pherodeon/recipe-scrapers
816ee1cfd777149efff60ca01d377ab5e141e24b
[ "MIT" ]
null
null
null
RecipeParser_Scraper.py
pherodeon/recipe-scrapers
816ee1cfd777149efff60ca01d377ab5e141e24b
[ "MIT" ]
null
null
null
RecipeParser_Scraper.py
pherodeon/recipe-scrapers
816ee1cfd777149efff60ca01d377ab5e141e24b
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Thu Feb 6 20:55:32 2020 @author: arosso """ from recipe_scrapers import scrape_me # give the url as a string, it can be url from any site listed below # scraper = scrape_me('http://allrecipes.com/Recipe/Apple-Cake-Iv/Detail.aspx') scraper = scrape_me('https://www.101cookbooks.com/instant-pot-mushroom-stroganoff/') dict_recipe = dict() dict_recipe['title'] = scraper.title() dict_recipe['total_time'] = scraper.total_time() dict_recipe['yields'] = scraper.yields() dict_recipe['ingredients'] = scraper.ingredients() dict_recipe['instructions'] = scraper.instructions() #dict_recipe['image'] = scraper.image() #dict_recipe['links'] = scraper.links() print(dict_recipe)
25.827586
84
0.700935
from recipe_scrapers import scrape_me scraper = scrape_me('https://www.101cookbooks.com/instant-pot-mushroom-stroganoff/') dict_recipe = dict() dict_recipe['title'] = scraper.title() dict_recipe['total_time'] = scraper.total_time() dict_recipe['yields'] = scraper.yields() dict_recipe['ingredients'] = scraper.ingredients() dict_recipe['instructions'] = scraper.instructions() print(dict_recipe)
true
true
f7041a9f74821dafed13acb12aa043265464c9cb
9,581
py
Python
DC_method/util.py
Ikerlz/dcd
056e5c4060f9d655ce4f6234b86481ae4b3f7106
[ "MIT" ]
null
null
null
DC_method/util.py
Ikerlz/dcd
056e5c4060f9d655ce4f6234b86481ae4b3f7106
[ "MIT" ]
null
null
null
DC_method/util.py
Ikerlz/dcd
056e5c4060f9d655ce4f6234b86481ae4b3f7106
[ "MIT" ]
null
null
null
import numpy as np import pandas as pd from sklearn.cluster import KMeans import itertools import findspark import pyspark from pyspark.sql.functions import pandas_udf, PandasUDFType from pyspark.sql.types import * import time def simulate_sbm_dc_data(sbm_matrix, sample_size=1000, partition_num=10, cluster_num=3): """ :param sbm_matrix: :param sample_size: :param partition_num: :param cluster_num: :return: """ if (sbm_matrix.shape[0] != cluster_num) | \ (sbm_matrix.shape[1] != cluster_num) | \ (sbm_matrix.shape[0] != sbm_matrix.shape[1]): raise Exception("sbm_matrix shape Error or the Shape is not equal to Cluster_num") else: data_index = [x for x in range(sample_size)] data_cluster = np.random.randint(0, cluster_num, sample_size).tolist() index_cluster = dict(zip(data_index, data_cluster)) X = np.empty(shape=[0, 3], dtype=int) X = np.append(X, [[0, -1, np.random.randint(0, partition_num, 1)[0]]], axis=0) for i in range(1, sample_size): p_num = np.random.randint(0, partition_num, 1)[0] X = np.append(X, [[i, -1, p_num]], axis=0) # to avoid node lost for j in range(i): if np.random.binomial(1, sbm_matrix[index_cluster[i], index_cluster[j]], 1): X = np.append(X, [[i, j, p_num]], axis=0) data_pdf = pd.DataFrame(X, columns=["IndexNum1"] + ["IndexNum2"] + ["PartitionID"]) return data_pdf, index_cluster def get_laplace_matrix(adjacency_matrix, position="master", regularization=False): """ :param adjacency_matrix: 邻接矩阵(方阵或长矩阵) :param position: master或worker :param regularization: 是否进行正则化 :return: 拉普拉斯矩阵 """ if regularization: if position == "master": degree = np.sum(adjacency_matrix, axis=1) d = np.diag((degree + np.mean(degree)) ** (-0.5)) # 得到度矩阵 return np.dot(np.dot(d, adjacency_matrix), d) elif position == "worker": # 2020.7.18 for test out_degree = np.sum(adjacency_matrix, axis=1) out_degree_matrix = np.diag((out_degree + np.mean(out_degree)) ** (-0.5)) for i in range(out_degree_matrix.shape[0]): if out_degree_matrix[i, i] == np.infty: out_degree_matrix[i, i] = 1000 
in_degree = np.sum(adjacency_matrix, axis=0) in_degree_matrix = np.diag((in_degree + np.mean(in_degree)) ** (-0.5)) ### laplace_matrix = np.dot(np.dot(out_degree_matrix, adjacency_matrix), in_degree_matrix) return laplace_matrix # D = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5)) # F = np.diag(np.sum(adjacency_matrix, axis=0) ** (-0.5)) # return np.dot(np.dot(D, adjacency_matrix), F) # 得到度矩阵 else: raise Exception("Input Error: worker or master is expected but {} are given".format(position)) else: if position == "master": d = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5)) # 得到度矩阵 return np.dot(np.dot(d, adjacency_matrix), d) elif position == "worker": out_degree_matrix = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5)) for i in range(out_degree_matrix.shape[0]): if out_degree_matrix[i, i] == np.infty: out_degree_matrix[i, i] = 10000 in_degree_matrix = np.diag(np.sum(adjacency_matrix, axis=0) ** (-0.5)) laplace_matrix = np.dot(np.dot(out_degree_matrix, adjacency_matrix), in_degree_matrix) return laplace_matrix # D = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5)) # F = np.diag(np.sum(adjacency_matrix, axis=0) ** (-0.5)) # return np.dot(np.dot(D, adjacency_matrix), F) # 得到度矩阵 else: raise Exception("Input Error: worker or master is expected but {} are given".format(position)) def get_spectral(laplace_matrix, k, normalization=False, method='svd'): """ :param laplace_matrix: 拉普拉斯矩阵 :param k: 截取SVD后的前k个向量 :param normalization: 是否归一化 :param method: 选择用奇异值分解(SVD)还是特征值分解(EVD) :return: 得到的谱 """ if method == 'svd': u, _, _ = np.linalg.svd(laplace_matrix) spectral = u[:, list(range(k))] if normalization: row_len = len(u) # 行数 for i in range(row_len): norm2 = np.linalg.norm(spectral[i]) if norm2: spectral[i] = spectral[i] / np.linalg.norm(spectral[i]) elif method == 'evd': e_vals, e_vecs = np.linalg.eig(laplace_matrix) sorted_indices = np.argsort(e_vals) spectral = e_vecs[:, sorted_indices[:-k-1:-1]] if normalization: row_len = len(e_vecs) # 行数 for i in 
range(row_len): norm2 = np.linalg.norm(spectral[i]) if norm2: spectral[i] = spectral[i] / np.linalg.norm(spectral[i]) else: raise ValueError("method must be 'svd' or 'evd' but {} is given".format(method)) return spectral def worker_clustering(worker_df, cluster_num): """ :param worker_df: :param method: :param cluster_num: :return: """ node_list = list(set(worker_df["IndexNum1"].tolist())) node_num = len(node_list) index_list = [x for x in range(node_num)] node2index = dict(zip(node_list, index_list)) adj_matrix = np.zeros((node_num, node_num), dtype=int) for i in range(node_num): adj_matrix[i][i] = 10 for row in worker_df.itertuples(index=False, name='Pandas'): item1 = getattr(row, "IndexNum1") item2 = getattr(row, "IndexNum2") if (item2 in node_list) & (item2 != -1): adj_matrix[node2index[item1]][node2index[item2]] = 1 adj_matrix[node2index[item2]][node2index[item1]] = 1 # first, get the laplace matrix laplace_matrix = get_laplace_matrix(adj_matrix, position='master', regularization=False) # second, get the spectral spectral = get_spectral(laplace_matrix, cluster_num, normalization=False, method='svd') # third, do k-means in spectral model = KMeans(n_clusters=cluster_num) model_fit = model.fit(spectral) # do k_means in spectral_transpose # cluster_center = model_fit.cluster_centers_ # center points cluster_label = list(model_fit.labels_) # labels (cluster information) # return worker_num = worker_df["PartitionID"].tolist()[0] out_df = pd.DataFrame({"PartitionID": [worker_num for _ in range(len(node_list))], "IndexNum": node_list, "ClusterExp": cluster_label}) return out_df def get_accurate(clustering_res_df, cluster_number, error=False): """ :param clustering_res_df: a pandas DataFrame about clustering result :param cluster_number: the number of the cluster (the first column is the index, the second column is the right information, the third column is the clustering information) :param error: if error=True, then return the error rate, else, return the accuracy 
rate :return: the clustering accuracy """ if clustering_res_df.shape[1] != 3: raise Exception("Shape Error: the input DataFrame's column number is not 3") real_dict = {} clustering_dict = {} for i in range(cluster_number): real_df = clustering_res_df.loc[clustering_res_df['ClusterInfo'] == i] clustering_df = clustering_res_df.loc[clustering_res_df['ClusterExp'] == i] real_dict[i] = real_df['IndexNum'].tolist() clustering_dict[i] = clustering_df['IndexNum'].tolist() accuracy_matrix = np.zeros((cluster_number, cluster_number)) for i in range(cluster_number): for j in range(cluster_number): accuracy_matrix[i][j] = len(set(real_dict[i]).intersection(set(clustering_dict[j]))) # for test # print("The accuracy matrix is: \n", accuracy_matrix) case_iterator = itertools.permutations(range(cluster_number), cluster_number) accurate = 0 for item in case_iterator: acc = sum([accuracy_matrix[i][item[i]] for i in range(cluster_number)]) if acc > accurate: accurate = acc if not error: return accurate / clustering_res_df.shape[0] else: return 1 - accurate / clustering_res_df.shape[0] # TODO some SBM matrix sbm_matrix1 = np.array([[0.7, 0.45, 0.45], [0.45, 0.7, 0.45], [0.45, 0.45, 0.7]]) sbm_matrix2 = np.array([[0.8, 0.4, 0.4], [0.4, 0.8, 0.4], [0.4, 0.4, 0.8]]) sbm_matrix3 = np.array([[0.6, 0.45, 0.45], [0.45, 0.6, 0.45], [0.45, 0.45, 0.6]]) sbm_matrix4 = np.array([[0.2, 0.1, 0.1], [0.1, 0.2, 0.1], [0.1, 0.1, 0.2]]) if __name__ == '__main__': # Model Settings sbm_matrix = sbm_matrix4 sample_size = 1000 master_num = 100 worker_per_sub = 20 partition_num = 50 cluster_num = 3 a, b = simulate_sbm_dc_data(sbm_matrix) c = worker_clustering(a, 3) real_label = [] for row in c.itertuples(index=False, name='Pandas'): item = getattr(row, "IndexNum") real_label.append(b[item]) c["ClusterInfo"] = real_label print(get_accurate(c, 3)) print(c) # print(a)
38.324
106
0.600355
import numpy as np import pandas as pd from sklearn.cluster import KMeans import itertools import findspark import pyspark from pyspark.sql.functions import pandas_udf, PandasUDFType from pyspark.sql.types import * import time def simulate_sbm_dc_data(sbm_matrix, sample_size=1000, partition_num=10, cluster_num=3): if (sbm_matrix.shape[0] != cluster_num) | \ (sbm_matrix.shape[1] != cluster_num) | \ (sbm_matrix.shape[0] != sbm_matrix.shape[1]): raise Exception("sbm_matrix shape Error or the Shape is not equal to Cluster_num") else: data_index = [x for x in range(sample_size)] data_cluster = np.random.randint(0, cluster_num, sample_size).tolist() index_cluster = dict(zip(data_index, data_cluster)) X = np.empty(shape=[0, 3], dtype=int) X = np.append(X, [[0, -1, np.random.randint(0, partition_num, 1)[0]]], axis=0) for i in range(1, sample_size): p_num = np.random.randint(0, partition_num, 1)[0] X = np.append(X, [[i, -1, p_num]], axis=0) for j in range(i): if np.random.binomial(1, sbm_matrix[index_cluster[i], index_cluster[j]], 1): X = np.append(X, [[i, j, p_num]], axis=0) data_pdf = pd.DataFrame(X, columns=["IndexNum1"] + ["IndexNum2"] + ["PartitionID"]) return data_pdf, index_cluster def get_laplace_matrix(adjacency_matrix, position="master", regularization=False): if regularization: if position == "master": degree = np.sum(adjacency_matrix, axis=1) d = np.diag((degree + np.mean(degree)) ** (-0.5)) return np.dot(np.dot(d, adjacency_matrix), d) elif position == "worker": out_degree = np.sum(adjacency_matrix, axis=1) out_degree_matrix = np.diag((out_degree + np.mean(out_degree)) ** (-0.5)) for i in range(out_degree_matrix.shape[0]): if out_degree_matrix[i, i] == np.infty: out_degree_matrix[i, i] = 1000 in_degree = np.sum(adjacency_matrix, axis=0) in_degree_matrix = np.diag((in_degree + np.mean(in_degree)) ** (-0.5)) laplace_matrix = np.dot(np.dot(out_degree_matrix, adjacency_matrix), in_degree_matrix) return laplace_matrix else: raise Exception("Input Error: worker or 
master is expected but {} are given".format(position)) else: if position == "master": d = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5)) return np.dot(np.dot(d, adjacency_matrix), d) elif position == "worker": out_degree_matrix = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5)) for i in range(out_degree_matrix.shape[0]): if out_degree_matrix[i, i] == np.infty: out_degree_matrix[i, i] = 10000 in_degree_matrix = np.diag(np.sum(adjacency_matrix, axis=0) ** (-0.5)) laplace_matrix = np.dot(np.dot(out_degree_matrix, adjacency_matrix), in_degree_matrix) return laplace_matrix else: raise Exception("Input Error: worker or master is expected but {} are given".format(position)) def get_spectral(laplace_matrix, k, normalization=False, method='svd'): if method == 'svd': u, _, _ = np.linalg.svd(laplace_matrix) spectral = u[:, list(range(k))] if normalization: row_len = len(u) for i in range(row_len): norm2 = np.linalg.norm(spectral[i]) if norm2: spectral[i] = spectral[i] / np.linalg.norm(spectral[i]) elif method == 'evd': e_vals, e_vecs = np.linalg.eig(laplace_matrix) sorted_indices = np.argsort(e_vals) spectral = e_vecs[:, sorted_indices[:-k-1:-1]] if normalization: row_len = len(e_vecs) for i in range(row_len): norm2 = np.linalg.norm(spectral[i]) if norm2: spectral[i] = spectral[i] / np.linalg.norm(spectral[i]) else: raise ValueError("method must be 'svd' or 'evd' but {} is given".format(method)) return spectral def worker_clustering(worker_df, cluster_num): node_list = list(set(worker_df["IndexNum1"].tolist())) node_num = len(node_list) index_list = [x for x in range(node_num)] node2index = dict(zip(node_list, index_list)) adj_matrix = np.zeros((node_num, node_num), dtype=int) for i in range(node_num): adj_matrix[i][i] = 10 for row in worker_df.itertuples(index=False, name='Pandas'): item1 = getattr(row, "IndexNum1") item2 = getattr(row, "IndexNum2") if (item2 in node_list) & (item2 != -1): adj_matrix[node2index[item1]][node2index[item2]] = 1 
adj_matrix[node2index[item2]][node2index[item1]] = 1 laplace_matrix = get_laplace_matrix(adj_matrix, position='master', regularization=False) spectral = get_spectral(laplace_matrix, cluster_num, normalization=False, method='svd') model = KMeans(n_clusters=cluster_num) model_fit = model.fit(spectral) cluster_label = list(model_fit.labels_) worker_num = worker_df["PartitionID"].tolist()[0] out_df = pd.DataFrame({"PartitionID": [worker_num for _ in range(len(node_list))], "IndexNum": node_list, "ClusterExp": cluster_label}) return out_df def get_accurate(clustering_res_df, cluster_number, error=False): if clustering_res_df.shape[1] != 3: raise Exception("Shape Error: the input DataFrame's column number is not 3") real_dict = {} clustering_dict = {} for i in range(cluster_number): real_df = clustering_res_df.loc[clustering_res_df['ClusterInfo'] == i] clustering_df = clustering_res_df.loc[clustering_res_df['ClusterExp'] == i] real_dict[i] = real_df['IndexNum'].tolist() clustering_dict[i] = clustering_df['IndexNum'].tolist() accuracy_matrix = np.zeros((cluster_number, cluster_number)) for i in range(cluster_number): for j in range(cluster_number): accuracy_matrix[i][j] = len(set(real_dict[i]).intersection(set(clustering_dict[j]))) # for test # print("The accuracy matrix is: \n", accuracy_matrix) case_iterator = itertools.permutations(range(cluster_number), cluster_number) accurate = 0 for item in case_iterator: acc = sum([accuracy_matrix[i][item[i]] for i in range(cluster_number)]) if acc > accurate: accurate = acc if not error: return accurate / clustering_res_df.shape[0] else: return 1 - accurate / clustering_res_df.shape[0] # TODO some SBM matrix sbm_matrix1 = np.array([[0.7, 0.45, 0.45], [0.45, 0.7, 0.45], [0.45, 0.45, 0.7]]) sbm_matrix2 = np.array([[0.8, 0.4, 0.4], [0.4, 0.8, 0.4], [0.4, 0.4, 0.8]]) sbm_matrix3 = np.array([[0.6, 0.45, 0.45], [0.45, 0.6, 0.45], [0.45, 0.45, 0.6]]) sbm_matrix4 = np.array([[0.2, 0.1, 0.1], [0.1, 0.2, 0.1], [0.1, 0.1, 0.2]]) if __name__ 
== '__main__': # Model Settings sbm_matrix = sbm_matrix4 sample_size = 1000 master_num = 100 worker_per_sub = 20 partition_num = 50 cluster_num = 3 a, b = simulate_sbm_dc_data(sbm_matrix) c = worker_clustering(a, 3) real_label = [] for row in c.itertuples(index=False, name='Pandas'): item = getattr(row, "IndexNum") real_label.append(b[item]) c["ClusterInfo"] = real_label print(get_accurate(c, 3)) print(c) # print(a)
true
true
f7041bf911e595ee4a13e3144de596f82f262cd3
5,452
py
Python
src/sentry/models/eventerror.py
JannKleen/sentry
8b29c8234bb51a81d5cab821a1f2ed4ea8e8bd88
[ "BSD-3-Clause" ]
1
2019-02-27T15:13:06.000Z
2019-02-27T15:13:06.000Z
src/sentry/models/eventerror.py
rmax/sentry
8b29c8234bb51a81d5cab821a1f2ed4ea8e8bd88
[ "BSD-3-Clause" ]
5
2020-07-17T11:20:41.000Z
2021-05-09T12:16:53.000Z
src/sentry/models/eventerror.py
zaasmi/codeerrorhelp
1ab8d3e314386b9b2d58dad9df45355bf6014ac9
[ "BSD-3-Clause" ]
2
2021-01-26T09:53:39.000Z
2022-03-22T09:01:47.000Z
from __future__ import absolute_import import six from string import Formatter class dontexplodedict(object): """ A dictionary that won't throw a KeyError and will return back a sensible default value to be used in string formatting. """ def __init__(self, d=None): self.data = d or {} def __getitem__(self, key): return self.data.get(key, '') class EventError(object): INVALID_DATA = 'invalid_data' INVALID_ATTRIBUTE = 'invalid_attribute' VALUE_TOO_LONG = 'value_too_long' UNKNOWN_ERROR = 'unknown_error' SECURITY_VIOLATION = 'security_violation' RESTRICTED_IP = 'restricted_ip' JS_GENERIC_FETCH_ERROR = 'js_generic_fetch_error' # deprecated in favor of FETCH_GENERIC_ERROR FETCH_GENERIC_ERROR = 'fetch_generic_error' JS_INVALID_HTTP_CODE = 'js_invalid_http_code' # deprecated in favor of FETCH_INVALID_HTTP_CODE FETCH_INVALID_HTTP_CODE = 'fetch_invalid_http_code' JS_INVALID_CONTENT = 'js_invalid_content' JS_NO_COLUMN = 'js_no_column' JS_MISSING_SOURCE = 'js_no_source' JS_INVALID_SOURCEMAP = 'js_invalid_source' JS_TOO_MANY_REMOTE_SOURCES = 'js_too_many_sources' JS_INVALID_SOURCE_ENCODING = 'js_invalid_source_encoding' FETCH_INVALID_ENCODING = 'fetch_invalid_source_encoding' JS_INVALID_SOURCEMAP_LOCATION = 'js_invalid_sourcemap_location' JS_TOO_LARGE = 'js_too_large' # deprecated in favor of FETCH_TOO_LARGE FETCH_TOO_LARGE = 'fetch_too_large' JS_FETCH_TIMEOUT = 'js_fetch_timeout' # deprecated in favor of FETCH_TIMEOUT FETCH_TIMEOUT = 'fetch_timeout' NATIVE_NO_CRASHED_THREAD = 'native_no_crashed_thread' NATIVE_INTERNAL_FAILURE = 'native_internal_failure' NATIVE_NO_SYMSYND = 'native_no_symsynd' NATIVE_BAD_DSYM = 'native_bad_dsym' NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM = 'native_optionally_bundled_dsym' NATIVE_MISSING_DSYM = 'native_missing_dsym' NATIVE_MISSING_SYSTEM_DSYM = 'native_missing_system_dsym' NATIVE_MISSING_SYMBOL = 'native_missing_symbol' NATIVE_SIMULATOR_FRAME = 'native_simulator_frame' NATIVE_UNKNOWN_IMAGE = 'native_unknown_image' PROGUARD_MISSING_MAPPING = 
'proguard_missing_mapping' PROGUARD_MISSING_LINENO = 'proguard_missing_lineno' _messages = { INVALID_DATA: u'Discarded invalid value for parameter \'{name}\'', INVALID_ATTRIBUTE: u'Discarded invalid parameter \'{name}\'', VALUE_TOO_LONG: u'Discarded value for \'{name}\' due to exceeding maximum length', UNKNOWN_ERROR: u'Unknown error', SECURITY_VIOLATION: u'Cannot fetch resource due to security violation on {url}', RESTRICTED_IP: u'Cannot fetch resource due to restricted IP address on {url}', # deprecated in favor of FETCH_GENERIC_ERROR JS_GENERIC_FETCH_ERROR: u'Unable to fetch resource: {url}', FETCH_GENERIC_ERROR: u'Unable to fetch resource: {url}', JS_INVALID_HTTP_CODE: u'HTTP returned {value} response on {url}', # deprecated in favor of FETCH_INVALID_HTTP_CODE FETCH_INVALID_HTTP_CODE: u'HTTP returned {value} response on {url}', JS_INVALID_CONTENT: u'Source file was not JavaScript: {url}', JS_NO_COLUMN: u'Cannot expand sourcemap due to no column information for {url}', JS_MISSING_SOURCE: u'Source code was not found for {url}', JS_INVALID_SOURCEMAP: u'Sourcemap was invalid or not parseable: {url}', JS_TOO_MANY_REMOTE_SOURCES: u'The maximum number of remote source requests was made', JS_INVALID_SOURCE_ENCODING: u'Source file was not \'{value}\' encoding: {url}', FETCH_INVALID_ENCODING: u'Source file was not \'{value}\' encoding: {url}', JS_INVALID_SOURCEMAP_LOCATION: u'Invalid location in sourcemap: ({column}, {row})', # deprecated in favor of FETCH_TOO_LARGE JS_TOO_LARGE: u'Remote file too large: ({max_size:g}MB, {url})', FETCH_TOO_LARGE: u'Remote file too large: ({max_size:g}MB, {url})', # deprecated in favor of FETCH_TIMEOUT JS_FETCH_TIMEOUT: u'Remote file took too long to load: ({timeout}s, {url})', FETCH_TIMEOUT: u'Remote file took too long to load: ({timeout}s, {url})', NATIVE_NO_CRASHED_THREAD: u'No crashed thread found in crash report', NATIVE_INTERNAL_FAILURE: u'Internal failure when attempting to symbolicate: {error}', NATIVE_NO_SYMSYND: u'The symbolizer 
is not configured for this system.', NATIVE_BAD_DSYM: u'The debug symbol file used was broken.', NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM: u'An optional debug symbol file was missing.', NATIVE_MISSING_DSYM: u'A required debug symbol file was missing.', NATIVE_MISSING_SYSTEM_DSYM: u'A system debug symbol file was missing.', NATIVE_MISSING_SYMBOL: u'Unable to resolve a symbol.', NATIVE_SIMULATOR_FRAME: u'Encountered an unprocessable simulator frame.', NATIVE_UNKNOWN_IMAGE: u'An binary image is referenced that is unknown.', PROGUARD_MISSING_MAPPING: u'A proguard mapping file was missing.', PROGUARD_MISSING_LINENO: u'A proguard mapping file does not contain line info.', } @classmethod def get_message(cls, data): return Formatter().vformat( cls._messages[data['type']], [], dontexplodedict(data), ) def to_dict(self): return {k: v for k, v in six.iteritems(self) if k != 'type'}
50.018349
99
0.721937
from __future__ import absolute_import import six from string import Formatter class dontexplodedict(object): def __init__(self, d=None): self.data = d or {} def __getitem__(self, key): return self.data.get(key, '') class EventError(object): INVALID_DATA = 'invalid_data' INVALID_ATTRIBUTE = 'invalid_attribute' VALUE_TOO_LONG = 'value_too_long' UNKNOWN_ERROR = 'unknown_error' SECURITY_VIOLATION = 'security_violation' RESTRICTED_IP = 'restricted_ip' JS_GENERIC_FETCH_ERROR = 'js_generic_fetch_error' FETCH_GENERIC_ERROR = 'fetch_generic_error' JS_INVALID_HTTP_CODE = 'js_invalid_http_code' FETCH_INVALID_HTTP_CODE = 'fetch_invalid_http_code' JS_INVALID_CONTENT = 'js_invalid_content' JS_NO_COLUMN = 'js_no_column' JS_MISSING_SOURCE = 'js_no_source' JS_INVALID_SOURCEMAP = 'js_invalid_source' JS_TOO_MANY_REMOTE_SOURCES = 'js_too_many_sources' JS_INVALID_SOURCE_ENCODING = 'js_invalid_source_encoding' FETCH_INVALID_ENCODING = 'fetch_invalid_source_encoding' JS_INVALID_SOURCEMAP_LOCATION = 'js_invalid_sourcemap_location' JS_TOO_LARGE = 'js_too_large' FETCH_TOO_LARGE = 'fetch_too_large' JS_FETCH_TIMEOUT = 'js_fetch_timeout' FETCH_TIMEOUT = 'fetch_timeout' NATIVE_NO_CRASHED_THREAD = 'native_no_crashed_thread' NATIVE_INTERNAL_FAILURE = 'native_internal_failure' NATIVE_NO_SYMSYND = 'native_no_symsynd' NATIVE_BAD_DSYM = 'native_bad_dsym' NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM = 'native_optionally_bundled_dsym' NATIVE_MISSING_DSYM = 'native_missing_dsym' NATIVE_MISSING_SYSTEM_DSYM = 'native_missing_system_dsym' NATIVE_MISSING_SYMBOL = 'native_missing_symbol' NATIVE_SIMULATOR_FRAME = 'native_simulator_frame' NATIVE_UNKNOWN_IMAGE = 'native_unknown_image' PROGUARD_MISSING_MAPPING = 'proguard_missing_mapping' PROGUARD_MISSING_LINENO = 'proguard_missing_lineno' _messages = { INVALID_DATA: u'Discarded invalid value for parameter \'{name}\'', INVALID_ATTRIBUTE: u'Discarded invalid parameter \'{name}\'', VALUE_TOO_LONG: u'Discarded value for \'{name}\' due to exceeding maximum length', 
UNKNOWN_ERROR: u'Unknown error', SECURITY_VIOLATION: u'Cannot fetch resource due to security violation on {url}', RESTRICTED_IP: u'Cannot fetch resource due to restricted IP address on {url}', JS_GENERIC_FETCH_ERROR: u'Unable to fetch resource: {url}', FETCH_GENERIC_ERROR: u'Unable to fetch resource: {url}', JS_INVALID_HTTP_CODE: u'HTTP returned {value} response on {url}', FETCH_INVALID_HTTP_CODE: u'HTTP returned {value} response on {url}', JS_INVALID_CONTENT: u'Source file was not JavaScript: {url}', JS_NO_COLUMN: u'Cannot expand sourcemap due to no column information for {url}', JS_MISSING_SOURCE: u'Source code was not found for {url}', JS_INVALID_SOURCEMAP: u'Sourcemap was invalid or not parseable: {url}', JS_TOO_MANY_REMOTE_SOURCES: u'The maximum number of remote source requests was made', JS_INVALID_SOURCE_ENCODING: u'Source file was not \'{value}\' encoding: {url}', FETCH_INVALID_ENCODING: u'Source file was not \'{value}\' encoding: {url}', JS_INVALID_SOURCEMAP_LOCATION: u'Invalid location in sourcemap: ({column}, {row})', JS_TOO_LARGE: u'Remote file too large: ({max_size:g}MB, {url})', FETCH_TOO_LARGE: u'Remote file too large: ({max_size:g}MB, {url})', JS_FETCH_TIMEOUT: u'Remote file took too long to load: ({timeout}s, {url})', FETCH_TIMEOUT: u'Remote file took too long to load: ({timeout}s, {url})', NATIVE_NO_CRASHED_THREAD: u'No crashed thread found in crash report', NATIVE_INTERNAL_FAILURE: u'Internal failure when attempting to symbolicate: {error}', NATIVE_NO_SYMSYND: u'The symbolizer is not configured for this system.', NATIVE_BAD_DSYM: u'The debug symbol file used was broken.', NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM: u'An optional debug symbol file was missing.', NATIVE_MISSING_DSYM: u'A required debug symbol file was missing.', NATIVE_MISSING_SYSTEM_DSYM: u'A system debug symbol file was missing.', NATIVE_MISSING_SYMBOL: u'Unable to resolve a symbol.', NATIVE_SIMULATOR_FRAME: u'Encountered an unprocessable simulator frame.', NATIVE_UNKNOWN_IMAGE: u'An 
binary image is referenced that is unknown.', PROGUARD_MISSING_MAPPING: u'A proguard mapping file was missing.', PROGUARD_MISSING_LINENO: u'A proguard mapping file does not contain line info.', } @classmethod def get_message(cls, data): return Formatter().vformat( cls._messages[data['type']], [], dontexplodedict(data), ) def to_dict(self): return {k: v for k, v in six.iteritems(self) if k != 'type'}
true
true
f7041c77708c3d11ea491231f4684dd0218200ea
3,864
py
Python
python/fcdd/datasets/outlier_exposure/emnist.py
denix56/fcdd
d110aa8b141dc13f47156da913a6b4f9d64ddc74
[ "MIT" ]
null
null
null
python/fcdd/datasets/outlier_exposure/emnist.py
denix56/fcdd
d110aa8b141dc13f47156da913a6b4f9d64ddc74
[ "MIT" ]
null
null
null
python/fcdd/datasets/outlier_exposure/emnist.py
denix56/fcdd
d110aa8b141dc13f47156da913a6b4f9d64ddc74
[ "MIT" ]
null
null
null
import os.path as pt import numpy as np import torchvision.transforms as transforms import torch from torch.utils.data import DataLoader from torchvision.datasets import EMNIST def ceil(x: float): return int(np.ceil(x)) class MyEMNIST(EMNIST): """ Reimplements get_item to transform tensor input to pil image before applying transformation. """ def __getitem__(self, index): img, target = self.data[index], self.targets[index] # doing this so that it is consistent with all other datasets # to return a PIL Image img = transforms.ToPILImage()(img) if self.target_transform is not None: target = self.target_transform(target) if self.transform is not None: img = self.transform(img) return img, target class OEEMNIST(EMNIST): def __init__(self, size: torch.Size, root: str = None, split='letters', limit_var=20): # split = Train """ Outlier Exposure dataset for EMNIST. :param size: size of the samples in n x c x h x w, samples will be resized to h x w. If n is larger than the number of samples available in EMNIST, dataset will be enlarged by repetitions to fit n. This is important as exactly n images are extracted per iteration of the data_loader. For online supervision n should be set to 1 because only one sample is extracted at a time. :param root: root directory where data is found or is to be downloaded to. :param split: The dataset has 6 different splits: ``byclass``, ``bymerge``, ``balanced``, ``letters``, ``digits`` and ``mnist``. This argument specifies which one to use. :param limit_var: limits the number of different samples, i.e. randomly chooses limit_var many samples from all available ones to be the training data. 
""" assert len(size) == 3 and size[1] == size[2] root = pt.join(root, 'emnist', ) transform = transforms.Compose([ transforms.Resize((size[1], size[2])), transforms.ToTensor() ]) super().__init__(root, split, transform=transform, download=True) self.size = size self.data = self.data.transpose(1, 2) self.idx_to_class = {v: k for k, v in self.class_to_idx.items()} if limit_var is not None and limit_var < len(self): picks = np.random.choice(np.arange(self.data.size(0)), size=limit_var, replace=False) self.data = self.data[picks] self.targets = self.targets[picks] if limit_var is not None and limit_var > len(self): print( 'OEEMNIST shall be limited to {} samples, but Cifar100 contains only {} samples, thus using all.' .format(limit_var, len(self)) ) if len(self) < size[0]: rep = ceil(size[0] / len(self)) old = len(self) self.data = self.data.repeat(rep, 1, 1) self.targets = self.targets.repeat(rep) if rep != size[0] / old: import warnings warnings.warn( 'OEEMNIST has been limited to {} samples. ' 'Due to the requested size of {}, the dataset will be enlarged. ' 'But {} repetitions will make some samples appear more often than others in the dataset, ' 'because the final size after repetitions is {}, which is cut to {}' .format(limit_var, size[0], rep, len(self), size[0]) ) def data_loader(self): return DataLoader(dataset=self, batch_size=self.size[0], shuffle=True, num_workers=0) def __getitem__(self, index): sample, target = super().__getitem__(index) sample = sample.squeeze().mul(255).byte() return sample
42.933333
116
0.61206
import os.path as pt import numpy as np import torchvision.transforms as transforms import torch from torch.utils.data import DataLoader from torchvision.datasets import EMNIST def ceil(x: float): return int(np.ceil(x)) class MyEMNIST(EMNIST): def __getitem__(self, index): img, target = self.data[index], self.targets[index] img = transforms.ToPILImage()(img) if self.target_transform is not None: target = self.target_transform(target) if self.transform is not None: img = self.transform(img) return img, target class OEEMNIST(EMNIST): def __init__(self, size: torch.Size, root: str = None, split='letters', limit_var=20): assert len(size) == 3 and size[1] == size[2] root = pt.join(root, 'emnist', ) transform = transforms.Compose([ transforms.Resize((size[1], size[2])), transforms.ToTensor() ]) super().__init__(root, split, transform=transform, download=True) self.size = size self.data = self.data.transpose(1, 2) self.idx_to_class = {v: k for k, v in self.class_to_idx.items()} if limit_var is not None and limit_var < len(self): picks = np.random.choice(np.arange(self.data.size(0)), size=limit_var, replace=False) self.data = self.data[picks] self.targets = self.targets[picks] if limit_var is not None and limit_var > len(self): print( 'OEEMNIST shall be limited to {} samples, but Cifar100 contains only {} samples, thus using all.' .format(limit_var, len(self)) ) if len(self) < size[0]: rep = ceil(size[0] / len(self)) old = len(self) self.data = self.data.repeat(rep, 1, 1) self.targets = self.targets.repeat(rep) if rep != size[0] / old: import warnings warnings.warn( 'OEEMNIST has been limited to {} samples. ' 'Due to the requested size of {}, the dataset will be enlarged. 
' 'But {} repetitions will make some samples appear more often than others in the dataset, ' 'because the final size after repetitions is {}, which is cut to {}' .format(limit_var, size[0], rep, len(self), size[0]) ) def data_loader(self): return DataLoader(dataset=self, batch_size=self.size[0], shuffle=True, num_workers=0) def __getitem__(self, index): sample, target = super().__getitem__(index) sample = sample.squeeze().mul(255).byte() return sample
true
true